[ 506.558731] env[62875]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=62875) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 506.559162] env[62875]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=62875) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 506.559242] env[62875]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=62875) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 506.559529] env[62875]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 506.653576] env[62875]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62875) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 506.663535] env[62875]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62875) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 507.262070] env[62875]: INFO nova.virt.driver [None req-cc07726e-5721-4997-a95b-f4365ec74e90 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 507.333967] env[62875]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 507.334163] env[62875]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 507.334240] env[62875]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62875) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 510.491176] env[62875]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-52df6a63-3fbb-4ddc-9984-ccdc56b799f7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.507145] env[62875]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62875) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 510.507300] env[62875]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-63be7818-d723-47c4-930b-d8e746b7de1f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.542920] env[62875]: INFO oslo_vmware.api [-] Successfully established new session; session ID is bc585.
[ 510.543121] env[62875]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.209s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 510.543615] env[62875]: INFO nova.virt.vmwareapi.driver [None req-cc07726e-5721-4997-a95b-f4365ec74e90 None None] VMware vCenter version: 7.0.3
[ 510.547156] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0ea1c0-3438-4bdd-84e0-b7f076e664a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.565821] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96992557-8b53-4a14-aebb-d4cbeff9791c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.572339] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a369642-2731-4fdf-a666-e5f1dce60c6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.579936] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee586c3b-5a2c-491f-bf87-f7bcb8410795 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.593575] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c2af58-1155-476d-8679-880bf3ccd06c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.600538] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b431ac35-b7aa-4054-ac73-bee08b4dfc05 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.632437] env[62875]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-0442a9de-271f-4816-a3c8-55f964ca84d1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.638552] env[62875]: DEBUG nova.virt.vmwareapi.driver [None req-cc07726e-5721-4997-a95b-f4365ec74e90 None None] Extension org.openstack.compute already exists. {{(pid=62875) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 510.641233] env[62875]: INFO nova.compute.provider_config [None req-cc07726e-5721-4997-a95b-f4365ec74e90 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 511.145131] env[62875]: DEBUG nova.context [None req-cc07726e-5721-4997-a95b-f4365ec74e90 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),15c53ca6-a4f5-4a33-9f03-32afde92c264(cell1) {{(pid=62875) load_cells /opt/stack/nova/nova/context.py:464}}
[ 511.147698] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 511.147964] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 511.148845] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 511.149369] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Acquiring lock "15c53ca6-a4f5-4a33-9f03-32afde92c264" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 511.149597] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Lock "15c53ca6-a4f5-4a33-9f03-32afde92c264" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 511.150790] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Lock "15c53ca6-a4f5-4a33-9f03-32afde92c264" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 511.171919] env[62875]: INFO dbcounter [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Registered counter for database nova_cell0
[ 511.180009] env[62875]: INFO dbcounter [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Registered counter for database nova_cell1
[ 511.183320] env[62875]: DEBUG oslo_db.sqlalchemy.engines [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62875) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 511.183677] env[62875]: DEBUG oslo_db.sqlalchemy.engines [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62875) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 511.188632] env[62875]: ERROR nova.db.main.api [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 511.188632] env[62875]: result = function(*args, **kwargs)
[ 511.188632] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 511.188632] env[62875]: return func(*args, **kwargs)
[ 511.188632] env[62875]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 511.188632] env[62875]: result = fn(*args, **kwargs)
[ 511.188632] env[62875]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 511.188632] env[62875]: return f(*args, **kwargs)
[ 511.188632] env[62875]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 511.188632] env[62875]: return db.service_get_minimum_version(context, binaries)
[ 511.188632] env[62875]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 511.188632] env[62875]: _check_db_access()
[ 511.188632] env[62875]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 511.188632] env[62875]: stacktrace = ''.join(traceback.format_stack())
[ 511.188632] env[62875]:
[ 511.189471] env[62875]: ERROR nova.db.main.api [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 511.189471] env[62875]: result = function(*args, **kwargs)
[ 511.189471] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 511.189471] env[62875]: return func(*args, **kwargs)
[ 511.189471] env[62875]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 511.189471] env[62875]: result = fn(*args, **kwargs)
[ 511.189471] env[62875]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 511.189471] env[62875]: return f(*args, **kwargs)
[ 511.189471] env[62875]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 511.189471] env[62875]: return db.service_get_minimum_version(context, binaries)
[ 511.189471] env[62875]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 511.189471] env[62875]: _check_db_access()
[ 511.189471] env[62875]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 511.189471] env[62875]: stacktrace = ''.join(traceback.format_stack())
[ 511.189471] env[62875]:
[ 511.189843] env[62875]: WARNING nova.objects.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 511.190014] env[62875]: WARNING nova.objects.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Failed to get minimum service version for cell 15c53ca6-a4f5-4a33-9f03-32afde92c264
[ 511.190460] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Acquiring lock "singleton_lock" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 511.190638] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Acquired lock "singleton_lock" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 511.190881] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Releasing lock "singleton_lock" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 511.191239] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Full set of CONF: {{(pid=62875) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 511.191392] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ******************************************************************************** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 511.191525] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] Configuration options gathered from: {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 511.191662] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 511.191854] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 511.191985] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ================================================================================ {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 511.192213] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] allow_resize_to_same_host = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.192386] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] arq_binding_timeout = 300 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.192522] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] backdoor_port = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.192654] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] backdoor_socket = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.192820] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] block_device_allocate_retries = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.192983] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] block_device_allocate_retries_interval = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.193171] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cert = self.pem {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.193344] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.193520] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute_monitors = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.193693] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] config_dir = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.193865] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] config_drive_format = iso9660 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.194042] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.194294] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] config_source = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.194517] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] console_host = devstack {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.194692] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] control_exchange = nova {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.194860] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cpu_allocation_ratio = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.195036] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] daemon = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.195211] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] debug = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.195371] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] default_access_ip_network_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.195560] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] default_availability_zone = nova {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.195728] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] default_ephemeral_format = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.195892] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] default_green_pool_size = 1000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.196150] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.196319] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] default_schedule_zone = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.196532] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] disk_allocation_ratio = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.196758] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] enable_new_services = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.196942] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] enabled_apis = ['osapi_compute'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.197123] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] enabled_ssl_apis = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.197289] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] flat_injected = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.197456] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] force_config_drive = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.197611] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] force_raw_images = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.197783] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] graceful_shutdown_timeout = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.197947] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] heal_instance_info_cache_interval = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.198182] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] host = cpu-1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.198369] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.198567] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] initial_disk_allocation_ratio = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.198745] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] initial_ram_allocation_ratio = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.198963] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.199149] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instance_build_timeout = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.199313] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instance_delete_interval = 300 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.199482] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instance_format = [instance: %(uuid)s] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.199684] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instance_name_template = instance-%08x {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.199857] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instance_usage_audit = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.200043] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instance_usage_audit_period = month {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.200215] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.200437] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] instances_path = /opt/stack/data/nova/instances {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.200557] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] internal_service_availability_zone = internal {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.200715] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] key = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.200877] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] live_migration_retry_count = 30 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.201064] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_color = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.201239] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_config_append = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.201411] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.201576] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_dir = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.201735] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_file = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.201863] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_options = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.202034] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_rotate_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.202209] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_rotate_interval_type = days {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.202376] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] log_rotation_type = none {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.202506] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.202634] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.202804] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.202970] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.203112] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.203277] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] long_rpc_timeout = 1800 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.203447] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] max_concurrent_builds = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.203607] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] max_concurrent_live_migrations = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.203766] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] max_concurrent_snapshots = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.203925] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] max_local_block_devices = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.204097] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] max_logfile_count = 30 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.204261] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] max_logfile_size_mb = 200 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.204460] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] maximum_instance_delete_attempts = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.204636] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] metadata_listen = 0.0.0.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.204805] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] metadata_listen_port = 8775 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.204975] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] metadata_workers = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.205154] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] migrate_max_retries = -1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.205345] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] mkisofs_cmd = genisoimage {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.205578] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] my_block_storage_ip = 10.180.1.21 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.205722] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] my_ip = 10.180.1.21 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.205933] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.206113] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] network_allocate_retries = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.206294] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.206464] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] osapi_compute_listen = 0.0.0.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.206630] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] osapi_compute_listen_port = 8774 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.206798] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] osapi_compute_unique_server_name_scope = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.206970] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] osapi_compute_workers = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.207147] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] password_length = 12 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.207309] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] periodic_enable = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.207469] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] periodic_fuzzy_delay = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.207637] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] pointer_model = usbtablet {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.207802] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] preallocate_images = none {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.207994] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] publish_errors = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.208102] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] pybasedir = /opt/stack/nova {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.208262] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ram_allocation_ratio = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.208456] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] rate_limit_burst = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.208594] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] rate_limit_except_level = CRITICAL {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.208754] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] rate_limit_interval = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.208913] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] reboot_timeout = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.209086] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] reclaim_instance_interval = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.209247] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] record = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.209416] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] reimage_timeout_per_gb = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.209583] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] report_interval = 120 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.209746] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] rescue_timeout = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.209907] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] reserved_host_cpus = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.210078] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] reserved_host_disk_mb = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.210239] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] reserved_host_memory_mb = 512 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.210401] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] reserved_huge_pages = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.210570] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] resize_confirm_window = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.210725] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] resize_fs_using_block_device = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.210885] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] resume_guests_state_on_host_boot = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.211065] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.211233] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] rpc_response_timeout = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.211393] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] run_external_periodic_tasks = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.211565] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] running_deleted_instance_action = reap {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.211751] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] running_deleted_instance_poll_interval = 1800 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.211916] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] running_deleted_instance_timeout = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.212089] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler_instance_sync_interval = 120 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.212264] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_down_time = 720 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.212431] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] servicegroup_driver = db {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.212592] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] shell_completion = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.212751] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] shelved_offload_time = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.212908] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] shelved_poll_interval = 3600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.213088] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] shutdown_timeout = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.213248] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] source_is_ipv6 = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.213405] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ssl_only = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.213650] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.213818] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] sync_power_state_interval = 600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.213980] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] sync_power_state_pool_size = 1000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.214163] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] syslog_log_facility = LOG_USER {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.214320] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] tempdir = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.214523] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] timeout_nbd = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.214697] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] transport_url = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.214860] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] update_resources_interval = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.215034] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] use_cow_images = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.215198] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] use_journal = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.215357] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] use_json = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.215537] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] use_rootwrap_daemon = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.215702] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] use_stderr = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.215860] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] use_syslog = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.216076] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vcpu_pin_set = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.216274] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plugging_is_fatal = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.216449] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plugging_timeout = 300 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.216618] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] virt_mkfs = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.216782] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] volume_usage_poll_interval = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.216945] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] watch_log_file = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.217127] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] web = /usr/share/spice-html5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 511.217314] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.217483] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.217650] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.217823] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_concurrency.disable_process_locking = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.218413] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.218612] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.218787] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.218963] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.219152] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.219325] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.219510] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.auth_strategy = keystone {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.219680] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.compute_link_prefix = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.219855] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.220042] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.dhcp_domain = novalocal {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.220218] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.enable_instance_password = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.220387] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.glance_link_prefix = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.220559] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.220733] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.220899] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.instance_list_per_project_cells = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.221075] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.list_records_by_skipping_down_cells = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.221243] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.local_metadata_per_cell = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.221411] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.max_limit = 1000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.221582] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.metadata_cache_expiration = 15 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.221755] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.neutron_default_tenant_id = default {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.221923] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.response_validation = warn {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.222104] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.use_neutron_default_nets = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.222276] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.222439] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.222608] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.222781] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.222952] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.vendordata_dynamic_targets = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.223129] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.vendordata_jsonfile_path = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.223311] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.223504] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.backend = dogpile.cache.memcached {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.223698] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.backend_argument = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.223876] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.backend_expiration_time = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.224062] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.config_prefix = cache.oslo {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.224237] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.dead_timeout = 60.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.224419] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.debug_cache_backend = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.224593] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.enable_retry_client = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225670] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.enable_socket_keepalive = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225670] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.enabled = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225670] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.enforce_fips_mode = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225670] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.expiration_time = 600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225670] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.hashclient_retry_attempts = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225670] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.hashclient_retry_delay = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225912] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_dead_retry = 300 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.225912] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_password = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.226026] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.226200] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.226365] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_pool_maxsize = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.226556] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.226735] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_sasl_enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.226915] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.227099] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_socket_timeout = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.227262] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.memcache_username = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.227427] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.proxies = [] {{(pid=62875) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.227586] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.redis_db = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.227740] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.redis_password = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.227904] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.redis_sentinel_service_name = mymaster {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.228087] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.228256] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.redis_server = localhost:6379 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.228426] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.redis_socket_timeout = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.redis_username = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.retry_attempts = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.retry_delay = 0.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.socket_keepalive_count = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.socket_keepalive_idle = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.socket_keepalive_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.tls_allowed_ciphers = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231301] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.tls_cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231301] 
env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.tls_certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231301] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.tls_enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231301] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cache.tls_keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231301] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231301] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.auth_type = password {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231301] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231478] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.catalog_info = volumev3::publicURL {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231478] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231478] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231478] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.cross_az_attach = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231478] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.debug = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231609] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.endpoint_template = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231763] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.http_retries = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.231865] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.232031] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.keyfile = None {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.232208] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.os_region_name = RegionOne {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.232378] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.232536] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cinder.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.232706] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.232863] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.cpu_dedicated_set = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.233030] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.cpu_shared_set = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.233200] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.image_type_exclude_list = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.233365] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.233529] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.max_concurrent_disk_ops = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.233689] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.max_disk_devices_to_attach = -1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.233850] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.234024] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.234195] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.resource_provider_association_refresh = 300 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.234383] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.234555] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.shutdown_retry_interval = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.234741] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.234919] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] conductor.workers = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.235119] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] console.allowed_origins = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.235282] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] console.ssl_ciphers = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.235493] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] console.ssl_minimum_version = default {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.235687] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] consoleauth.enforce_session_timeout = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.235863] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] consoleauth.token_ttl = 600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.236063] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.236207] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.236374] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.236535] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.connect_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.236694] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.connect_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.236854] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.endpoint_override = None 
{{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.237026] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.237205] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.237351] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.237544] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.237710] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.237871] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.238042] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.238218] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.service_type = accelerator {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.238443] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.238617] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.238779] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.status_code_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.238938] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.239135] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.239302] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] cyborg.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
511.239474] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.asyncio_connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.239636] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.asyncio_slave_connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.239808] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.backend = sqlalchemy {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.239977] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.240159] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.connection_debug = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.240331] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.connection_parameters = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.240500] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.connection_recycle_time = 3600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.240665] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.connection_trace = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.240828] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.db_inc_retry_interval = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.240992] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.db_max_retries = 20 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.241172] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.db_max_retry_interval = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.241340] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.db_retry_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.241505] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.max_overflow = 50 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.241669] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.max_pool_size = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.241833] env[62875]: DEBUG oslo_service.service [None 
req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.max_retries = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.242010] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.242177] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.mysql_wsrep_sync_wait = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.242336] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.pool_timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.242497] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.retry_interval = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.242702] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.slave_connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.242876] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.sqlite_synchronous = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.243051] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] database.use_db_reconnect = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.243226] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.asyncio_connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.243386] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.asyncio_slave_connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.243558] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.backend = sqlalchemy {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.243743] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.243912] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.connection_debug = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.244106] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.connection_parameters = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.244279] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None 
None] api_database.connection_recycle_time = 3600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.244447] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.connection_trace = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.244611] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.db_inc_retry_interval = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.244777] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.db_max_retries = 20 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.244944] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.db_max_retry_interval = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.245126] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.db_retry_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.245291] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.max_overflow = 50 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.245504] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.max_pool_size = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.245732] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.max_retries = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.245917] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.246094] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.246275] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.pool_timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.246424] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.retry_interval = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.246586] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] api_database.slave_connection = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.246749] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] 
api_database.sqlite_synchronous = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.246927] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] devices.enabled_mdev_types = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.247120] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.247318] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ephemeral_storage_encryption.default_format = luks {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.247458] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ephemeral_storage_encryption.enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.247625] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.247801] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.api_servers = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.247968] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.248148] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.248316] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.248479] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.connect_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.248638] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.connect_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.248800] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.debug = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.248964] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.default_trusted_certificate_ids = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.249143] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.enable_certificate_validation 
= False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.249305] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.enable_rbd_download = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.249491] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.endpoint_override = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.249680] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.249846] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.250015] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.250183] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.250352] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.num_retries = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.250525] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.rbd_ceph_conf = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.250744] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.rbd_connect_timeout = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.250950] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.rbd_pool = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.251140] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.rbd_user = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.251314] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.251480] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.251645] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.251815] env[62875]: DEBUG 
oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.service_type = image {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.251980] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.252158] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.252321] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.status_code_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.252793] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.252998] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.253188] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.verify_glance_signatures = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.253358] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] glance.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.253532] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] guestfs.debug = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.253704] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.253869] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.auth_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.254042] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.254208] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.254390] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.254567] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.connect_retries = None 
{{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.254731] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.connect_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.254890] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.endpoint_override = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.255069] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.255232] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.255421] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.255618] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.255789] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.255949] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.256126] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.256300] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.service_type = shared-file-system {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.256469] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.share_apply_policy_timeout = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.256631] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.256793] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.256953] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.status_code_retry_delay = None {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.257128] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.257313] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.257480] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] manila.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.257689] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] mks.enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.258051] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.258251] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] image_cache.manager_interval = 2400 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.258427] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] image_cache.precache_concurrency = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.258605] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] image_cache.remove_unused_base_images = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.258774] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.258944] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.259136] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] image_cache.subdirectory_name = _base {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.259316] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.api_max_retries = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.259485] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.api_retry_interval = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.259648] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.auth_section = None {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.259812] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.auth_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.259973] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.260146] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.260313] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.260479] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.conductor_group = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.260640] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.connect_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.260799] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.connect_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.260957] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.endpoint_override = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.261134] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.261295] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.261461] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.261660] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.261833] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.peer_list = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.261994] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.262169] env[62875]: DEBUG oslo_service.service [None 
req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.262335] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.serial_console_state_timeout = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.262496] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.262670] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.service_type = baremetal {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.262829] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.shard = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.262993] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.263167] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.263326] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.status_code_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.263484] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.263667] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.263831] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ironic.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.264019] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.264201] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] key_manager.fixed_key = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.264402] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.264581] env[62875]: DEBUG oslo_service.service [None 
req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.barbican_api_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.264744] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.barbican_endpoint = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.264918] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.barbican_endpoint_type = public {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.265090] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.barbican_region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.265258] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.265443] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.265642] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.265807] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.265969] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.266149] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.number_of_retries = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.266312] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.retry_delay = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.266496] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.send_service_user_token = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.266658] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.266820] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.266982] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.verify_ssl = True {{(pid=62875) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.267155] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican.verify_ssl_path = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.267324] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.267546] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.auth_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.267645] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.267805] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.267970] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.268155] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.268316] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.268480] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.268646] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] barbican_service_user.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.268804] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.approle_role_id = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.268965] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.approle_secret_id = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.269150] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.kv_mountpoint = secret {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.269313] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.kv_path = None {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.269479] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.kv_version = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.269640] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.namespace = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.269800] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.root_token_id = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.269957] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.ssl_ca_crt_file = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.270140] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.timeout = 60.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.270307] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.use_ssl = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.270479] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.270662] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.270819] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.270984] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.271159] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.connect_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.271319] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.connect_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.271479] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.endpoint_override = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.271643] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.271801] env[62875]: DEBUG oslo_service.service [None 
req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.271960] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.272133] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.272291] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.272452] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.272616] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.272785] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.service_type = identity {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.272950] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.273125] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.273289] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.status_code_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.273448] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.273661] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.273836] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] keystone.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.274036] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.ceph_mount_options = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.274371] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.ceph_mount_point_base 
= /opt/stack/data/n-cpu-1/mnt {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.274563] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.connection_uri = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.274734] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.cpu_mode = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.274905] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.cpu_model_extra_flags = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.275090] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.cpu_models = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.275270] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.cpu_power_governor_high = performance {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.275467] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.cpu_power_governor_low = powersave {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.275642] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.cpu_power_management = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.275819] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.275995] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.device_detach_attempts = 8 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.276181] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.device_detach_timeout = 20 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.276355] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.disk_cachemodes = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.276527] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.disk_prefix = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.276691] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.enabled_perf_events = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.276857] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.file_backed_memory = 0 {{(pid=62875) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.277034] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.gid_maps = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.277201] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.hw_disk_discard = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.277361] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.hw_machine_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.277531] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.images_rbd_ceph_conf = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.277699] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.277863] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.278044] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.images_rbd_glance_store_name = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.278219] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.images_rbd_pool = rbd {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.278391] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.images_type = default {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.278554] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.images_volume_group = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.278756] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.inject_key = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.278881] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.inject_partition = -2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.279056] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.inject_password = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.279225] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.iscsi_iface = None {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.279391] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.iser_use_multipath = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.279558] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_bandwidth = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.279722] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.279889] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_downtime = 500 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.280066] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.280235] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.280397] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_inbound_addr = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.280566] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.280729] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_permit_post_copy = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.280889] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_scheme = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.281080] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_timeout_action = abort {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.281244] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_tunnelled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.281404] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.live_migration_uri = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.281569] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.281729] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.max_queues = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.281894] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.282145] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.282315] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.nfs_mount_options = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.282600] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.282781] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.282947] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.num_iser_scan_tries = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.283125] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.num_memory_encrypted_guests = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.283303] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.283474] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.num_pcie_ports = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.283643] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.num_volume_scan_tries = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.283813] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.pmem_namespaces = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.283975] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.quobyte_client_cfg = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.284287] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.284488] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rbd_connect_timeout = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.284671] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.284840] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.285011] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rbd_secret_uuid = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.285184] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rbd_user = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.285367] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.285559] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.remote_filesystem_transport = ssh {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.285724] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rescue_image_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.285886] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rescue_kernel_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.286058] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rescue_ramdisk_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.286234] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.286396] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.rx_queue_size = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.286600] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.smbfs_mount_options = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.286866] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.287054] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.snapshot_compression = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.287224] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.snapshot_image_format = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.287447] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.287619] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.sparse_logical_volumes = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.287784] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.swtpm_enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.287957] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.swtpm_group = tss {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.288140] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.swtpm_user = tss {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.288312] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.sysinfo_serial = unique {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.288473] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.tb_cache_size = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.288633] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.tx_queue_size = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.288798] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.uid_maps = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.288964] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.use_virtio_for_bridges = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.289150] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.virt_type = kvm {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.289321] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.volume_clear = zero 
{{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.289487] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.volume_clear_size = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.289656] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.volume_use_multipath = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.289815] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.vzstorage_cache_path = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.289984] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.290174] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.vzstorage_mount_group = qemu {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.290355] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.vzstorage_mount_opts = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.290527] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.290823] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.291018] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.vzstorage_mount_user = stack {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.291191] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.291368] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.291548] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.auth_type = password {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.291710] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.291871] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.certfile = None 
{{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.292049] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.292215] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.connect_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.292376] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.connect_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.292549] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.default_floating_pool = public {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.292709] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.endpoint_override = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.292875] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.extension_sync_interval = 600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.293048] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.http_retries = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.293218] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.293378] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.293542] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.293719] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.293884] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.294067] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.ovs_bridge = br-int {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.294240] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.physnets = [] {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.294440] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.region_name = RegionOne {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.294619] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.294796] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.service_metadata_proxy = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.294963] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.295151] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.service_type = network {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.295321] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.295513] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.295683] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.status_code_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.295846] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.296043] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.296213] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] neutron.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.296390] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] notifications.bdms_in_notifications = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.296570] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] notifications.default_level = INFO {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.296736] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] notifications.include_share_mapping = False {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.296913] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] notifications.notification_format = unversioned {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.297091] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] notifications.notify_on_state_change = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.297273] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.297452] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] pci.alias = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.297624] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] pci.device_spec = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.297790] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] pci.report_in_placement = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.297966] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.298156] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.auth_type = password {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.298327] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.298489] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.298660] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.298823] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.299012] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.connect_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.299188] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.connect_retry_delay = None {{(pid=62875) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.299354] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.default_domain_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.299514] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.default_domain_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.299673] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.domain_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.299831] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.domain_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.299989] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.endpoint_override = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.300168] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.300328] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.300486] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.300648] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.300815] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.password = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.300975] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.project_domain_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.301159] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.project_domain_name = Default {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.301327] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.project_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.301499] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.project_name = service {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.301668] 
env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.region_name = RegionOne {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.301832] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.301992] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.302179] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.service_type = placement {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.302348] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.302515] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.302676] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.status_code_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.302837] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.system_scope = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.303012] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.303182] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.trust_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.303342] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.user_domain_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.303514] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.user_domain_name = Default {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.303676] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.user_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.303850] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.username = nova {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.304041] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.304209] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] placement.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.304407] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.cores = 20 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.304587] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.count_usage_from_placement = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.304763] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.304936] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.injected_file_content_bytes = 10240 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.305123] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.injected_file_path_length = 255 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.305292] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.injected_files = 5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.305499] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.instances = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.305681] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.key_pairs = 100 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.305851] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.metadata_items = 128 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.306031] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.ram = 51200 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.306205] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.recheck_quota = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.306379] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.server_group_members = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.306551] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.server_groups = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.306801] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.306942] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] quota.unified_limits_resource_strategy = require {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.307124] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.307293] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.307457] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.image_metadata_prefilter = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.307624] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.307790] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.max_attempts = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.307961] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.max_placement_results = 1000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.308136] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.308303] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.query_placement_for_image_type_support = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.308467] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.308642] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] scheduler.workers = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.308813] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.308983] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.309177] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.309353] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.309523] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.309688] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.309854] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.310052] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.310228] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.host_subset_size = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.310398] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.310560] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.310728] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.310895] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.isolated_hosts = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.311071] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.isolated_images = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.311255] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.311419] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.311587] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.311754] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.pci_in_placement = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.311919] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.312101] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.312268] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.312434] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.312601] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.312766] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.312928] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.track_instance_changes = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.313119] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.313292] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] metrics.required = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.313461] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] metrics.weight_multiplier = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.313629] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.313806] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] metrics.weight_setting = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.314183] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.314397] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] serial_console.enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.314580] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] serial_console.port_range = 10000:20000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.314760] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.314932] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.315114] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] serial_console.serialproxy_port = 6083 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.315284] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.315532] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.auth_type = password {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.315728] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.315894] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.316073] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.316243] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.316465] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.316657] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.send_service_user_token = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.316823] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.317039] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] service_user.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.317170] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.agent_enabled = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.317335] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.317631] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.317833] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.318009] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.html5proxy_port = 6082 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.318181] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.image_compression = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.318340] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.jpeg_compression = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.318496] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.playback_compression = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.318662] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.require_secure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.318829] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.server_listen = 127.0.0.1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.319007] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.319179] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.streaming_mode = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.319375] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] spice.zlib_compression = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.319581] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] upgrade_levels.baseapi = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.319758] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] upgrade_levels.compute = auto {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.319923] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] upgrade_levels.conductor = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.320098] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] upgrade_levels.scheduler = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.320272] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.320440] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.auth_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.320604] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.320766] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.320931] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.321108] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.321271] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.321437] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.321600] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vendordata_dynamic_auth.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.321777] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.api_retry_count = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.321937] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.ca_file = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.322129] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.cache_prefix = devstack-image-cache {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.322300] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.cluster_name = testcl1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.322468] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.connection_pool_size = 10 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.322629] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.console_delay_seconds = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.322798] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.datastore_regex = ^datastore.* {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.323018] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.323197] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.host_password = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.323364] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.host_port = 443 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.323535] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.host_username = administrator@vsphere.local {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.323701] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.insecure = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.323862] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.integration_bridge = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.324035] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.maximum_objects = 100 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.324199] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.pbm_default_policy = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.324375] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.pbm_enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.324551] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.pbm_wsdl_location = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.324725] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.324884] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.serial_port_proxy_uri = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.325056] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.serial_port_service_uri = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.325231] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.task_poll_interval = 0.5 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.325441] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.use_linked_clone = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.325638] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.vnc_keymap = en-us {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.325807] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.vnc_port = 5900 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.325974] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vmware.vnc_port_total = 10000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.326176] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.auth_schemes = ['none'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.326353] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.326644] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.326830] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.327033] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.novncproxy_port = 6080 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.327208] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.server_listen = 127.0.0.1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.327393] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.327556] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.vencrypt_ca_certs = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.327717] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.vencrypt_client_cert = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.327877] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vnc.vencrypt_client_key = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.328064] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.328289] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.disable_deep_image_inspection = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.328397] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.328561] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.328722] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.328883] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.disable_rootwrap = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.329054] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.enable_numa_live_migration = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.329220] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.329380] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.329539] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.329703] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.libvirt_disable_apic = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.329863] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.330037] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.330208] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.330370] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.330533] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.330696] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.330859] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.331030] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.331196] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.331365] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.331550] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.331720] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.client_socket_timeout = 900 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.331887] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.default_pool_size = 1000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.332065] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.keep_alive = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.332236] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.max_header_line = 16384 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.332399] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.secure_proxy_ssl_header = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.332562] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.ssl_ca_file = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.332722] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.ssl_cert_file = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.332882] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.ssl_key_file = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.333059] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.tcp_keepidle = 600 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.333242] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.333411] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] zvm.ca_file = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.333575] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] zvm.cloud_connector_url = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.333873] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.334063] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] zvm.reachable_timeout = 300 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.334241] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.334451] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.334716] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.connection_string = messaging:// {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.334816] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.enabled = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.334986] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.es_doc_type = notification {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.335167] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.es_scroll_size = 10000 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.335337] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.es_scroll_time = 2m {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.335525] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.filter_error_trace = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.335702] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.hmac_keys = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.335873] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.sentinel_service_name = mymaster {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.336055] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.socket_timeout = 0.1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.336222] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.trace_requests = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.336384] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler.trace_sqlalchemy = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.336566] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler_jaeger.process_tags = {} {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.336730] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler_jaeger.service_name_prefix = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.336892] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] profiler_otlp.service_name_prefix = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.337071] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] remote_debug.host = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.337252] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] remote_debug.port = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.337412] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.337575] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.337739] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.337904] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.338079] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.338246] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.338410] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.338576] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.338741] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.338911] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.339082] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.339257] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.339434] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.339599] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.339770] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.339934] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.340113] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.340287] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.340453] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.340618] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.340787] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.340953] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.341130] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.341298] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.341463] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.341627] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.341792] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.341952] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.342131] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.342296] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.ssl = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.342467] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.342634] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.342797] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.342963] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.343144] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.ssl_version = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.343307] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.343490] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.343659] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_notifications.retry = -1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.343836] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.344017] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_messaging_notifications.transport_url = **** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.344198] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.auth_section = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.344389] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.auth_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.344552] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.cafile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.344713] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.certfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.344877] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.collect_timing = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.345046] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.connect_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.345210] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.connect_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.345386] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.endpoint_id = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.345573] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.endpoint_interface = publicURL {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.345736] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.endpoint_override = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.345896] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.endpoint_region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.346070] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.endpoint_service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.346235] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.endpoint_service_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.346399] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.insecure = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.346559] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.keyfile = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.346722] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.max_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.346876] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.min_version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.347046] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.region_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.347216] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.retriable_status_codes = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.347372] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.service_name = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.347530] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.service_type = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.347696] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.split_loggers = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.347857] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.status_code_retries = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.348025] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.status_code_retry_delay = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.348188] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.timeout = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.348350] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.valid_interfaces = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.348510] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_limit.version = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.348674] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_reports.file_event_handler = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.348841] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.349011] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] oslo_reports.log_dir = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.349192] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.349355] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.349518] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 511.349688] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_linux_bridge_privileged.logger_name =
oslo_privsep.daemon {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.349857] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.350036] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.350214] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.350378] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_ovs_privileged.group = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.350537] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.350703] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.350868] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.351038] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] vif_plug_ovs_privileged.user = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.351213] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.flat_interface = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.351395] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.351570] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.351739] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.351911] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.352096] 
env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.352269] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.352431] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.352614] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.352783] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_ovs.isolate_vif = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.352953] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.353134] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.353304] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.353475] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_ovs.ovsdb_interface = native {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.353639] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] os_vif_ovs.per_port_bridge = False {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.353811] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] privsep_osbrick.capabilities = [21] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.353972] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] privsep_osbrick.group = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.354147] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] privsep_osbrick.helper_command = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.354315] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
511.354507] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.354673] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] privsep_osbrick.user = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.354848] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.355014] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] nova_sys_admin.group = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.355182] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] nova_sys_admin.helper_command = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.355356] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.355593] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.355771] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] nova_sys_admin.user = None {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 511.355906] env[62875]: DEBUG oslo_service.service [None req-61e0879e-f44d-46f5-ab0e-c10e4bd0a98c None None] ******************************************************************************** {{(pid=62875) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 511.356327] env[62875]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 511.860008] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Getting list of instances from cluster (obj){ [ 511.860008] env[62875]: value = "domain-c8" [ 511.860008] env[62875]: _type = "ClusterComputeResource" [ 511.860008] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 511.861114] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d585fe28-b85c-4bd0-985e-4216e0d23a65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.870107] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Got total of 0 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 511.870699] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 511.871188] env[62875]: INFO nova.virt.node [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Generated node identity 2d6e5fad-ed55-4f17-b68d-be9dae183a02 [ 511.871436] env[62875]: INFO nova.virt.node [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Wrote node identity 2d6e5fad-ed55-4f17-b68d-be9dae183a02 to /opt/stack/data/n-cpu-1/compute_id [ 512.374619] env[62875]: WARNING nova.compute.manager [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Compute nodes ['2d6e5fad-ed55-4f17-b68d-be9dae183a02'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 513.380388] env[62875]: INFO nova.compute.manager [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 514.386738] env[62875]: WARNING nova.compute.manager [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 514.387126] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.387275] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.387430] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.387587] env[62875]: DEBUG nova.compute.resource_tracker [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 514.388539] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3619806-7514-4f82-9a8c-4feaccbcf68b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.396550] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c442665-9204-4b5f-8613-2fbc301cb597 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.411090] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0bfd6a-64c1-40e6-8c57-a0afec239adc {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.417572] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9f5564-6f05-42a0-89d1-aed4e6e3a108 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.446607] env[62875]: DEBUG nova.compute.resource_tracker [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181292MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 514.446772] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.446971] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.949672] env[62875]: WARNING nova.compute.resource_tracker [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] No compute node record for cpu-1:2d6e5fad-ed55-4f17-b68d-be9dae183a02: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 2d6e5fad-ed55-4f17-b68d-be9dae183a02 could not be found. [ 515.453547] env[62875]: INFO nova.compute.resource_tracker [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 [ 516.962357] env[62875]: DEBUG nova.compute.resource_tracker [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 516.962702] env[62875]: DEBUG nova.compute.resource_tracker [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 517.117757] env[62875]: INFO nova.scheduler.client.report [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] [req-5a5b844c-720d-4d5b-9140-6eb3491c83aa] Created resource provider record via placement API for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 517.135793] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6ccfef-29c8-41af-8161-7f9cbca20dd9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.143612] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c16ff06-5102-4876-a7eb-1666e897116a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.173799] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a1661e-84aa-445f-9d13-64ce517cb5de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.181293] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074c84c3-48de-4d07-8f8a-8f1f2b7cd52e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.194196] env[62875]: DEBUG nova.compute.provider_tree [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 517.730859] env[62875]: DEBUG nova.scheduler.client.report [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 517.731157] env[62875]: DEBUG nova.compute.provider_tree [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 0 to 1 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 517.731308] env[62875]: DEBUG nova.compute.provider_tree [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 517.778831] env[62875]: DEBUG nova.compute.provider_tree [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Updating 
resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 1 to 2 during operation: update_traits {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 518.283543] env[62875]: DEBUG nova.compute.resource_tracker [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 518.283987] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.837s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.283987] env[62875]: DEBUG nova.service [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Creating RPC server for service compute {{(pid=62875) start /opt/stack/nova/nova/service.py:186}} [ 518.299532] env[62875]: DEBUG nova.service [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] Join ServiceGroup membership for this service compute {{(pid=62875) start /opt/stack/nova/nova/service.py:203}} [ 518.299705] env[62875]: DEBUG nova.servicegroup.drivers.db [None req-6576274f-5e57-47ad-a28d-bbccbad72c2c None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62875) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 565.303303] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.806771] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Getting list of instances from cluster (obj){ [ 565.806771] env[62875]: value = "domain-c8" [ 565.806771] env[62875]: _type = "ClusterComputeResource" [ 565.806771] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 565.808052] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4407da44-3323-4604-85e3-55279b07dc42 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.816727] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Got total of 0 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 565.816945] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.817262] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Getting list of instances from cluster (obj){ [ 565.817262] env[62875]: value = "domain-c8" [ 565.817262] env[62875]: _type = "ClusterComputeResource" [ 565.817262] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 565.818110] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00644cd5-93fa-46dc-932f-0defe0a55f2e 
{{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.825184] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Got total of 0 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 566.714726] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 566.715058] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 566.715272] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 566.715396] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 567.218411] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 567.218686] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.218888] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.219115] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.219341] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.219534] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.219724] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.219888] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 567.220053] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.723510] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.723893] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.723962] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.724140] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 567.725052] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce97af73-8533-41f8-a32e-94b347f8b9b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.732768] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f472d0-3a87-4bae-b3fa-04f15d867f71 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.745830] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bbbade-bb3c-4fdd-a9a0-ae5d6ad42520 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.751849] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e907de-dea5-43c3-802f-a8321e8c67cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.780835] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181296MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 567.780961] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.781154] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.798599] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 568.798847] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 568.813296] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009d301c-b2e2-443b-b205-064158d17bb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.821010] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d70d779-1df2-4101-901c-611b582fd7c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.850016] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d4e0d4-8477-4730-9cb4-32c6f34f3074 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.857011] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058dc9c2-7f71-448d-9ee7-22f0ab85b0da {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.869621] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.372476] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 569.877740] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 569.878129] env[62875]: DEBUG oslo_concurrency.lockutils [None 
req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.865424] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.865761] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.371030] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.371030] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 630.371030] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 630.873061] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. 
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 630.873435] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.873435] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.873543] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.873698] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.873868] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.874047] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 630.874184] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 630.874322] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 631.378122] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.378479] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.378715] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.378929] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 631.379859] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f67f6b-2620-484a-9cc3-9ebebf589ce5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.388586] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf44bc3-30d8-425a-94d0-4e7bd771bacc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.403172] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3810009-3a08-4ffd-a6a2-cf27baa157ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.409567] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e711fd8-cf61-409e-9478-9f56fb095bc3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.439105] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181283MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 631.439321] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.439437] 
env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.457396] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 632.457632] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 632.471600] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a98899-16cc-4145-9d35-52cfd8d22125 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.479306] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71134bb0-6ac6-42af-bb3d-a2e93fdafdd3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.508621] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d90866a-5069-42e8-83a1-bdd0039cf975 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.515565] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479cf3e2-972c-4dc8-9c71-f500dd309d8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.528222] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.031535] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 633.032816] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 633.033039] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.036709] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.036709] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.036709] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 693.036709] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 693.539938] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 693.539938] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.539938] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.539938] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.539938] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.539938] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.540342] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.540342] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 693.540397] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 694.043841] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.044223] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.044262] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.044442] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 694.045437] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd244182-6295-4465-9aa1-532030202f3b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.053806] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de23f974-b60b-4d0f-8f99-6414d86a6829 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.067621] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746d884e-4fe5-43e5-851d-27f3f89c270f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.073562] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15e8b5a-ac4f-4b97-9867-43874d0d60d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.102089] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181300MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 694.102244] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.102424] 
env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.121662] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 695.121936] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 695.137583] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00dd0e9-d649-4a6a-a418-9b2e14fb2302 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.144658] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556c532d-0d78-486e-87cc-1b853bd2b2de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.174028] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bceab5-0681-411c-8ea4-6af83e70c56c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.181259] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c94eb48-9216-43a0-94fb-ae2adb929dd8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.193653] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.697067] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 695.698374] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 695.698558] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.364726] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.365052] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.870035] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.870291] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 754.870359] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 755.373298] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 755.373698] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.373698] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.373893] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.373976] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.374099] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.374252] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.374420] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 755.374581] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.878240] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.878487] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.878654] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.878806] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 755.879761] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9584f72-3f66-460b-98b5-c8bcbb8e3f39 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.888425] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3b2a7b-11d8-4f41-a216-f8ff82c571f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.903017] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568943a2-b539-48bb-81bb-05458095d23d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.909366] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fb502f-cd20-4d53-8602-78deed8f2d02 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.938771] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181297MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 755.938895] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.939085] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.959332] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 756.959610] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 756.972556] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9886cc2b-fbf8-4380-a5f3-11b940e17ef7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.980421] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465dc59d-4de7-4427-8310-0942ee0fa683 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.011173] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec016a51-6b98-467d-a1aa-c0f45e621cb9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.017795] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e13c3b-2cc0-4549-a03d-d3e19286803f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.030203] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.533101] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.534453] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 757.534679] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.706781] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.707191] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.707240] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 807.210729] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 0 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 807.211017] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.211176] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 807.714347] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.213605] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.213919] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.214114] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 811.214240] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 811.717549] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
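
Each "Running periodic task ComputeManager._*" record above is emitted by oslo.service as it dispatches decorated manager methods on a timer. A minimal, self-contained sketch of that machinery, assuming the stock `oslo_service.periodic_task` API; the manager class and task body here are illustrative, not Nova source:

```python
# Minimal sketch (illustrative, not Nova source) of the oslo.service
# machinery behind the "Running periodic task ..." records above.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class MiniManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    # Decorated methods are collected automatically; spacing is the
    # minimum interval in seconds between runs.
    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _heal_instance_info_cache(self, context):
        print("healing info cache")

mgr = MiniManager()
# A service loop normally drives this repeatedly; one call runs whatever
# tasks are currently due and logs "Running periodic task ..." at DEBUG.
mgr.run_periodic_tasks(context=None)
```
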
[ 811.717796] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.717959] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.718123] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.718278] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.718424] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.718574] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 811.718735] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 812.222634] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.223053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.223053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.223209] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 812.224151] env[62875]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8571a19c-68c1-49cb-9a12-fdcf87e163c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.232785] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431eac79-89de-4442-b262-67af3f926a59 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.246696] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92e5ccf-6a91-474b-b1f9-4056fa44eea1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.252905] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b017ba9-ec82-4b76-be52-6a60c0da5552 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.280965] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181298MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 812.281115] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.281305] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.299683] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 813.299903] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 813.315150] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5265ea1-71bb-4b12-9b86-71e545f39b46 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.321025] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0af4fd2-6256-4039-943e-c8e15f6aba2a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.351140] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42024cc4-2444-4159-99b8-d2e95eabfef3 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.359098] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92c77cf-afab-4aa1-a2a2-997766c8668e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.373636] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.876047] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.876751] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 813.876984] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.865701] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.702291] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.707095] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.707456] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.707565] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
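
The recurring "CONF.reclaim_instance_interval <= 0, skipping..." record reflects a simple guard: deferred delete is disabled when the interval is zero or negative (0 is the Nova default), so the task returns without touching any instances. A paraphrased sketch of that guard pattern, not the verbatim Nova code:

```python
# Paraphrased sketch of the guard behind the record above (not verbatim
# Nova source). An interval <= 0 means deferred delete is disabled, so
# the periodic task bails out immediately.
RECLAIM_INSTANCE_INTERVAL = 0  # Nova's default disables deferred delete

def reclaim_queued_deletes():
    if RECLAIM_INSTANCE_INTERVAL <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # Otherwise a real implementation would look up SOFT_DELETED
    # instances older than the interval and reclaim (really delete) them.

reclaim_queued_deletes()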
[ 871.706684] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 871.706963] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 871.706963] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 872.210457] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 872.210786] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.210896] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.706624] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.210186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.210457] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.210634] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.210891] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 873.211796] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916e080f-a736-4e78-bf5e-44a81884e326 {{(pid=62875) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.220502] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67515d8f-bde5-4518-aff7-f2d27acd3837 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.234319] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4acf1c0-bd60-4279-84ee-56f57c392ef3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.240507] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0caed8f4-f8d2-4940-a613-c468baa0010c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.269639] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181272MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 873.269796] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.269968] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.324778] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 874.325072] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 874.341522] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 874.354074] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory 
/opt/stack/nova/nova/scheduler/client/report.py:786}} [ 874.354258] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.365754] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 874.381078] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 874.392603] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d988412-ab2d-4a50-a452-8779ebc867c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.400079] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7af3daf-8fca-4f7a-afd0-b84ecd2047bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.430583] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1d109c-757e-405a-86a8-0f6014524c0a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.438025] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de46d038-9d6c-4e7f-b5d4-7e708677557f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.450764] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.953714] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.954962] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
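
The inventory dicts logged above follow standard placement semantics: schedulable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may take from the provider. A small sketch working through the exact numbers in the records (the formula is standard placement behavior; the script itself is illustrative):

```python
# Sketch: schedulable capacity implied by the inventory logged above,
# using the standard placement formula
#   capacity = (total - reserved) * allocation_ratio
# max_unit caps a single allocation against this provider.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                  'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 175,
                  'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(f"{rc}: capacity={capacity}, per-allocation cap={inv['max_unit']}")
# VCPU: capacity=192, per-allocation cap=16
# MEMORY_MB: capacity=196078, per-allocation cap=65530
# DISK_GB: capacity=400, per-allocation cap=175
```
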
[ 874.955170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.685s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.955806] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.460686] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.706454] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.702171] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.705665] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.705846] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.705997] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.706151] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 932.707415] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.707768] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 932.707768] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 933.211023] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 933.706345] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.706642] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.210117] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.210458] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.210580] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.210665] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 934.211592] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ed9321-aed8-4403-9c6d-981eb2e3a5d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.220383] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b66cd96-a299-40a3-a8a6-c0c44b0b5308 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.233867] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b394897e-3ff0-470c-93ae-df1113d56df5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.240060] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acb96e2-bae0-4409-b5ba-9a66a58a17b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.267766] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181286MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 934.267920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.268104] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.286379] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 935.286658] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 935.299834] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2704903-2ac2-4871-84fa-93ba59eb6a7f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.307458] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772d8cef-f37d-4130-a47a-bb156dc0caaf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.338089] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbc7494-1940-4440-92e6-6002bb84fddc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.345556] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d51b23f-3767-4d0c-8f93-82df32b51a70 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.359224] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
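
The "Inventory has not changed" records indicate the report client compares the freshly computed inventory against its cached ProviderTree copy and skips the placement update when they are equal. A minimal sketch of that diff-before-update pattern; this is an illustrative stand-in, not the actual report-client code:

```python
# Illustrative sketch of the diff-before-update pattern behind the
# "Inventory has not changed" records above (not the actual report client).
def set_inventory_if_changed(cache, provider_uuid, new_inventory):
    if cache.get(provider_uuid) == new_inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    cache[provider_uuid] = new_inventory
    print(f"Updating inventory for provider {provider_uuid}")
    # A real client would now PUT the new inventory to placement.
    return True

cache = {}
inv = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}}
uuid = '2d6e5fad-ed55-4f17-b68d-be9dae183a02'
set_inventory_if_changed(cache, uuid, inv)  # first audit: updates
set_inventory_if_changed(cache, uuid, inv)  # next audit: no change, no PUT
```
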
[ 935.862105] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.863484] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 935.863702] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.863725] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.708750] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.702596] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.706166] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.706322] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 992.707676] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 993.707607] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 993.708834] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 993.708834] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.211827] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.212063] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.212232] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.212384] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 994.213297] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e071b5d5-df4e-4f28-96aa-3a784687fbba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.221216] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a7ca45-7478-423d-acab-c0a89d2480e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.234925] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c19b0cf-fbb3-44d2-8bf0-c53a57d8a9b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.241256] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-b5b779ed-1bcc-4da5-931a-a4708af5b5cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.270762] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181292MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 994.270897] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.271099] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.289677] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 995.289922] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 995.302816] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db413cde-5fbd-4190-865c-0ed4eeaf3489 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.310441] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9528bf-eb7a-4f4a-b240-77c9436bc98c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.339866] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0aa381-8a57-49ca-9469-803917575651 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.349846] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8886cb-2126-4057-a413-3f27f522fd37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.360605] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.864189] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.865494] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 995.865677] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.864447] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.369234] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.369424] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 997.369561] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 997.872571] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. 
[ 997.872870] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1052.707506] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1052.707920] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1053.707026] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1053.707248] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1054.707163] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1054.707543] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1054.707543] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.706421] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.706649] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1056.211056] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1056.211056] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1056.211056] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1056.211056] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1056.211621] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692bf696-71ab-46e3-b11f-efee2151d9b9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1056.219574] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110a7f04-808c-4695-be7c-b6afbc91095e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1056.233256] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c64b014-5eea-4de7-8b1c-00bbaf19b0fb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1056.239252] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d3fecf-643a-40e8-b69a-209e4292a862 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1056.269536] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181289MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1056.269711] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1056.269888] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1057.287765] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1057.288067] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1057.301033] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7f357a-e868-41da-8e6e-f41b62b837e9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.308806] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee20f96-e3f5-47bc-9020-cc65cda9b633 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.338226] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b8b96b-d841-4152-b8ea-86d64f8c0e54 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.345263] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639ac91d-d27a-4138-9669-db0568640fbe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.357852] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1057.861130] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1057.862456] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1057.862618] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1058.863339] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1058.863688] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1058.863688] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1059.368071] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 1112.206882] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1112.706212] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1113.706487] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1113.706863] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1114.706593] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1115.707419] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1115.707825] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1115.707825] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1116.211368] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1116.211627] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1116.211796] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
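The "Running periodic task ComputeManager.<name>" lines come from oslo.service's periodic task machinery: the manager subclasses PeriodicTasks, decorates methods, and run_periodic_tasks fires whichever tasks are due. A minimal sketch, with an illustrative task body that also mirrors the reclaim guard logged above:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    # Registered here only to make the sketch self-contained; Nova defines
    # this option itself.
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            # Matches the "CONF.reclaim_instance_interval <= 0, skipping..."
            # entries: a non-positive interval disables reclamation.
            if CONF.reclaim_instance_interval <= 0:
                return

    # A service loop calls this on a timer; each due task is logged as
    # "Running periodic task ..." before it executes.
    Manager().run_periodic_tasks(context=None)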
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.211949] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1116.212868] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403c7f0b-af75-4513-9acb-5c1c9f03a00d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.221443] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736cf171-135f-464f-9423-90b2b069416b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.236341] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0922007a-bfac-47a6-a495-4ac669e7ba80 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.242802] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388ea888-f202-4b45-b46d-c4da9ca6b513 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.272529] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181295MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1116.272702] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.272905] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.292536] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1117.292783] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1117.306017] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c49b00-d359-4ab2-bbd2-730ecf602eb6 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.313589] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f501f1a7-789e-4537-aecd-4327d25ba69c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.342698] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8172a3-e10d-4040-bfcd-52bd7c7928a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.349395] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d005f641-8b19-468d-bd17-ad1b2024e173 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.362422] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.865602] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.866838] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1117.867024] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.867235] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.867370] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1118.370465] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 0 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1118.370866] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks 
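The "Inventory has not changed in ProviderTree" / "Inventory has not changed for provider ..." pairs show the report client short-circuiting: it only sends inventory to placement when the freshly computed records differ from its cached copy. A sketch of that guard, with illustrative names:

    def set_inventory_for_provider(cached, new, provider_uuid):
        """Return True if an update was (notionally) sent to placement."""
        if cached == new:
            # Corresponds to the "Inventory has not changed for provider
            # <uuid> based on inventory data: ..." entries above.
            print('Inventory has not changed for provider %s' % provider_uuid)
            return False
        cached.clear()
        cached.update(new)  # refresh the local cache, then PUT upstream
        return True

On an idle node like this one, every audit cycle takes the no-change branch, so placement sees no writes at all between cycles.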
[ 1119.208823] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1119.713459] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1119.713821] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1119.713821] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1120.216245] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 1120.216491] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1120.216670] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1120.216802] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}}
[ 1165.809213] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1166.312821] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Getting list of instances from cluster (obj){
[ 1166.312821] env[62875]: value = "domain-c8"
[ 1166.312821] env[62875]: _type = "ClusterComputeResource"
[ 1166.312821] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}}
[ 1166.313984] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59aa391-3f40-4791-b8ce-1a792ec7e430 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1166.322901] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Got total of 0 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}}
[ 1174.215968] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
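For _sync_power_states the VMware driver enumerates the cluster's VMs through the session's PropertyCollector helpers, which is what the opID'd RetrievePropertiesEx call above is. A sketch of the call shape using oslo.vmware; the hostname and credentials are placeholders, the argument layout is an assumption, and it needs a reachable vCenter to actually run:

    from oslo_vmware import api, vim_util

    # host, username, password, api_retry_count, task_poll_interval
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret', 2, 0.5)

    # One RetrievePropertiesEx round trip, like the opID'd calls in the log.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100, ['runtime.powerState'])
    vms = result.objects if result else []
    print('Got total of %d instances' % len(vms))

With zero instances on the node, the comparison of database power states against hypervisor power states is trivially a no-op, matching "Got total of 0 instances" above.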
[ 1174.705978] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1174.706254] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1174.706397] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1175.708154] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1176.706258] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1176.706558] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1176.706819] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1177.706368] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1178.209786] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1178.210061] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1178.210217] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1178.210369] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1178.211405] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efcb78d-6352-452c-a6d7-ed7c7356c3fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.219547] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc4d4eb-608e-4852-ae64-c3987a547fc3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.232908] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831984d9-dfef-45d5-8fdc-df91b0ff027d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.239250] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408e27ca-68ad-4cd5-a6b7-6c27e1824780 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.268760] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181286MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1178.268886] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1178.269082] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1179.399998] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1179.399998] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1179.415261] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}}
[ 1179.426090] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}}
[ 1179.426090] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1179.434905] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}}
[ 1179.449475] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}}
[ 1179.461016] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b3fcc1-063b-4012-a3c3-6744c43bce4a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1179.466427] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9bf4ec-a0dc-4c24-b7ff-2b92cada229e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1179.496474] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dccb769-e000-48cf-836e-f0ce9833da33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1179.504093] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3f7e7a-d5fc-421d-920f-923a0d6b1d4b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1179.517114] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1180.022838] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
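This cycle differs from the others: the refresh entries (inventories, then aggregates, then traits) show the report client renewing its cached view of the resource provider rather than just comparing against it. A generic sketch of that cache-refresh pattern; the TTL value and the fetch callables are assumptions, not Nova's real configuration:

    import time

    class ProviderCache:
        TTL = 300.0  # seconds between forced refreshes (assumed value)

        def __init__(self):
            self._stamp = 0.0
            self.inventory, self.aggregates, self.traits = {}, set(), set()

        def refresh(self, get_inventory, get_aggregates, get_traits):
            if time.monotonic() - self._stamp < self.TTL:
                return  # cache still fresh; skip the placement round trips
            self.inventory = get_inventory()          # "Refreshing inventories ..."
            self.aggregates = set(get_aggregates())   # "Refreshing aggregate associations ..."
            self.traits = set(get_traits())           # "Refreshing trait associations ..."
            self._stamp = time.monotonic()

Keeping a TTL'd local copy is what lets most audit cycles conclude "Inventory has not changed" without touching the placement API.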
[ 1180.022838] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1180.022838] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.754s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.025060] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1181.025060] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1181.025060] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1181.528156] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 1234.707390] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1234.707798] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1234.707798] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1236.708506] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1236.708879] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1236.708879] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1236.709069] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1237.706800] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1238.210098] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1238.210441] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1238.210484] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1238.210636] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1238.211672] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bc92a4-6840-47a7-be18-7061a6620b7c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1238.219885] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138c15eb-4e40-4eaa-9fb5-4bdda0bb816b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1238.233793] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbef5ed-ffd7-4cb7-ae29-2da1bcc4b597 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1238.239951] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d68ce3d-68d9-4a0f-9fac-ff4e125ff8c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1238.268277] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181269MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1238.268453] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1238.268583] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1239.289035] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1239.289310] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1239.304276] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ecb8f0-d023-49a3-bd23-a8110e5aeb55 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1239.311789] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe05e704-a5da-47e4-92a6-29667c4a31e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1239.341093] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4c192b-32fc-407a-addd-ac375ec791ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1239.348051] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65db7431-405b-4b50-a1f7-91491187b591 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1239.360538] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1239.863903] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1239.865159] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1239.865345] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.597s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1240.866011] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1241.374512] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1241.374512] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1241.374512] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1241.876951] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
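The heal cycle repeats the same shape every time here: rebuild the candidate list, heal at most a bounded amount of work per pass, or log that there is nothing to do, as with the zero instances on this node. A simplified sketch of that control flow (illustrative, not the compute manager's actual code):

    def heal_instance_info_cache(instances):
        # "Rebuilding the list of instances to heal"
        to_heal = list(instances)
        if not to_heal:
            print("Didn't find any instances for network info cache update.")
            return
        instance = to_heal.pop(0)
        # ... refresh this one instance's network info from Neutron here ...

Healing only a small slice per periodic run keeps each pass cheap, at the cost of taking many cycles to cover a busy node.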
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1241.876951] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.707917] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.708387] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.708387] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1297.706539] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.707532] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.707532] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.706758] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1299.706731] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.209546] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.209858] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.209974] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.210144] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1300.211063] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c54255-75b2-48c8-b27b-c5d6413bf989 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.219143] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c3bc2e-27dd-4c0d-90cb-60755068d058 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.232693] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ade734-9bc8-4b9f-98a8-55de6b57b864 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.238795] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513522d1-a4be-4538-8a1c-8149f9cef760 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.267618] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181273MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1300.267778] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.267981] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.288172] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1301.288462] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1301.301988] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18930843-931e-46c4-9590-c293bfe54979 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.309463] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd98efc6-1ba5-4852-94c8-eaec5e362e65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.339162] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a613fa4-5ef3-4998-ad26-581b4ac000ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.345868] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d049e4-e808-47f8-bd00-b6d2c5041df6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.358792] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.861903] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1301.863215] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1301.863396] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.864081] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.864451] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1302.864451] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1303.367814] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. 
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1303.368085] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.207764] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.707107] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.707296] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1358.706562] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.706854] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.706935] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.707108] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.706846] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.210270] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.210526] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.210706] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" 
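The Acquiring/acquired/released triplets above are emitted by oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421): the decorated function runs with the named lock held, and the wrapper itself logs how long it waited for and then held the lock. A minimal sketch of the pattern, with an illustrative function body:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def _update_available_resource():
        # Runs only while "compute_resources" is held; the wrapper logs
        # Acquiring -> acquired (waited N.NNNs) -> released (held N.NNNs).
        pass

The zero-length clean_compute_node_cache hold above is unremarkable: with no stale compute nodes to purge, the critical section is effectively empty.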
"released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.210859] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1360.211887] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f3e45a-1cd2-4c3f-8ef4-dc0eb396b9ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.220156] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62178b89-d536-4243-8723-83aa48ca3717 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.233907] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d15929d-7fdc-4953-9234-de4771aa77f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.239960] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff74bb77-c416-45bc-8dcf-a5c3577633f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.270055] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181290MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1360.270055] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.270055] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.287159] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1361.287409] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1361.299565] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea244c2e-b674-49a8-956f-624c17bc5dda {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.307373] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9eb1eb-6536-4e40-b89b-e0d00074b3aa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.337854] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6a6494-e47e-4a33-9412-1614795e2db4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.345353] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def77d09-73a9-4c93-ad06-fa570022c2c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.358610] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.862059] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1361.863342] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1361.863522] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.864662] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.368760] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.369082] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1363.369082] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:10449}} [ 1363.872566] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1363.872940] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.706614] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.707026] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.707026] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.707193] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1419.707580] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.707950] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.707996] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.708171] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.708301] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1422.208831] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1422.712107] env[62875]: DEBUG 
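Every "Running periodic task ComputeManager._..." entry is oslo.service's dispatcher (run_periodic_tasks at periodic_task.py:210) invoking a method the manager class registered with the periodic_task decorator. A minimal sketch of that registration, with an illustrative task name and spacing:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ManagerSketch(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # the real task polls volume usage from the driver

    # A service loop calls this on a timer; each due task produces one
    # "Running periodic task ..." line like those above.
    ManagerSketch().run_periodic_tasks(context=None)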
[ 1422.712107] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1422.712396] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1422.712570] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1422.712730] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1422.713738] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7133b9c6-b205-4c5a-a9f9-f615d8623d31 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.722762] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45578f2d-9a77-4ac9-a84b-15f388b52a2a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.736520] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92176ae-306f-4848-b9fe-1e3a9bfb6d73 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.742712] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9c99b7-ffaf-40be-99ae-c234a789fc37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.771510] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181274MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1422.771654] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1422.771870] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1423.791102] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1423.791102] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1423.804257] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e06442-ba90-4062-9d2b-f8aeb9ed009b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.811832] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e6d5a5-e12f-4edf-92cb-cdbf024b058e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.841957] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373798ff-4a50-40e0-aa7f-444a6c710306 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.849172] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84d7098-6beb-403d-94d8-91b8559b1d64 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.861802] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1424.364766] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1424.366064] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1424.366253] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.366466] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1424.366600] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}}
[ 1424.870015] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 0 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}}
[ 1425.367750] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1425.367968] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1425.368054] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1425.871297] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 1425.871657] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1430.708161] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1479.204652] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1479.706950] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1479.707240] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1479.707408] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
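Each "Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-..." entry is one SOAP round trip to vCenter through the driver's oslo.vmware session; the audit issues a handful per cycle to read host CPU, RAM and datastore stats. A hedged sketch of such a call (credentials and the property list are placeholders; get_objects is the oslo.vmware helper that issues RetrievePropertiesEx under the hood):

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder credentials; the host matches the vCenter in this log.
    session = vmware_api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Surfaces in the log as "Invoking
    # PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-...".
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'HostSystem', 100, ['summary.hardware'])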
[ 1479.707552] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1480.706750] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1481.707367] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1482.706903] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1482.707169] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1482.707213] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1483.209615] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 1483.209960] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1483.713279] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1483.713538] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1483.713692] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1483.713849] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1483.714869] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f1225f-683d-4642-ac35-8ef5352aab09 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1483.723588] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5423ee3-74a0-448a-b60c-fced075a02f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1483.737684] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d029d0-fc55-4b84-8985-bc10493acb96 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1483.744293] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dad9332-de4e-4f49-9136-0557309a85d0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1483.773336] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181269MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1483.773468] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1483.773724] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1484.807970] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1484.808254] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1484.825330] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}}
[ 1484.836593] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}}
[ 1484.836593] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1484.844732] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}}
[ 1484.861159] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}}
[ 1484.871075] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943e6753-134d-48c0-a1c9-21c45cc9d2d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1484.880073] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabf535b-27ad-4ed3-8cb3-039300bec90b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1484.910212] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da97f69-0fff-4cbc-9bcb-0d30f1cdf274 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1484.917426] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a1acb8-8ef2-40da-938c-baea0bf18e31 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1484.930448] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
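The inventory dict repeated throughout is exactly what this node reports to placement, and since no instances run here it never changes between cycles. Placement's schedulable capacity per resource class is (total - reserved) * allocation_ratio, so this node can place 192 VCPU, 196078 MEMORY_MB and 400 DISK_GB. A quick check of that arithmetic (the helper is illustrative):

    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inventory):
        # Effective capacity per class: (total - reserved) * allocation_ratio.
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inventory.items()}

    print(capacity(INVENTORY))  # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}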
[ 1485.435489] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1485.435489] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1485.435489] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.662s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1485.931939] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1486.436959] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1539.208324] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1539.706567] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1539.706798] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1539.706940] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1541.708688] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1541.709121] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1542.707131] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1543.210320] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1543.210759] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1543.210882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1543.211023] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1543.211946] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3c0e3b-e643-43aa-a221-3c421777a969 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1543.220368] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1af396-d416-4a19-ae86-3786d2f41b44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1543.235223] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd02855-f721-4216-a25c-75b564f6810c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1543.241445] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6923b06-ee03-49e0-ae9e-99bf037f8129 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1543.269529] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181248MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1543.269682] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1543.269839] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1544.288197] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1544.288483] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1544.301394] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446f7d69-1348-47f8-94d2-b3a35b1667a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1544.308862] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55acda41-86d7-4099-a21f-45fc7237ff11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1544.339188] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ef6013-111a-43c5-b6a9-cc395d3b7626 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1544.345858] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca6aa4f-4305-4771-ba0c-3f74c5d089d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1544.358609] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1544.862091] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1544.863438] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1544.863625] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
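"Inventory has not changed in ProviderTree" (provider_tree.py:180) is the outcome of a plain mapping comparison: each cycle rebuilds the same dict, the cached copy compares equal, and the write is skipped. A toy version of that decision:

    def update_inventory(cached, new):
        if cached == new:
            return False   # logged as "Inventory has not changed in ProviderTree ..."
        cached.clear()
        cached.update(new)
        return True        # logged as "Updating inventory in ProviderTree ..."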
[ 1545.863452] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1545.863846] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1545.863846] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1546.367020] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 1546.367279] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1546.367451] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1599.707780] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1599.708173] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1600.707058] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1600.707058] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1602.706356] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1603.706600] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1603.706870] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1604.706156] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1605.209089] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1605.209439] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1605.209559] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1605.209652] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1605.210538] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb4a473-a4ed-4238-91dd-603ae6401805 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1605.218910] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97fba5a-0274-4783-906b-60d376e452f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1605.232873] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9144a25c-2d11-48d2-a8e0-637c58fc8c86 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1605.239139] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf96d985-da5d-4eee-a47a-77fc778b9696 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1605.268255] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181244MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1605.268428] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1605.268591] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1606.286426] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1606.286724] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1606.300029] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef77827-a1d9-4fca-a9ca-5a0ac09a0203 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1606.307214] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2d36af-26b0-46a0-98a0-e03aa3d2ea37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1606.336061] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9897fe4d-bbc7-4e76-a91d-789e2a36202c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1606.342818] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab62b3f5-f33c-4810-aa40-bef796d986d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1606.355876] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1606.859332] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1606.860620] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1606.860803] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1607.861935] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1608.367058] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1608.367058] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1608.367058] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
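The heal cycle always ends here in "Didn't find any instances..." because the node hosts nothing; with instances present, each run refreshes the network info cache for at most one of them. A sketch under that assumption (the callables are illustrative, not ComputeManager internals):

    def heal_instance_info_cache(get_instances, refresh_network_cache):
        instances = get_instances()  # "Rebuilding the list of instances to heal"
        if not instances:
            print("Didn't find any instances for network info cache update.")
            return
        refresh_network_cache(instances[0])  # one instance per periodic run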
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1608.870222] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1661.709788] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1661.710283] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1661.710283] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1661.710367] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1662.707551] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.706937] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1664.707728] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1666.706109] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1666.706459] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.209954] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.211569] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.211569] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.211569] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1667.211569] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0146045b-8244-45ff-9311-891249bcc617 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.219636] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea96896c-2741-423e-938d-8e0c4303d91c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.232935] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e99ce3-38d5-4338-b5a8-1347f07acb46 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.238782] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fbed12-f25d-4f26-b5ee-2d43053e39d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.267503] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181237MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1667.267634] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.267821] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.287637] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1668.287637] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB 
total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1668.300067] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed0e71d-2688-4783-b1ec-8b209659fb07 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.306140] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e18fb8-b248-4459-a692-fd97b70aa4a8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.336062] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1621eab4-d694-4c8f-9910-8e910412519e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.343261] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efec1e3-6d4c-4657-83c0-7ffd023fd3bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.356154] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.860875] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1668.860875] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1668.860875] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.861353] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1669.861864] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1669.862035] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1670.365418] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1721.707168] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1722.707197] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1722.707575] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1722.707575] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1723.702597] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.706224] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.706402] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.706782] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1726.209431] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 0 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1727.209783] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1727.210289] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1727.702525] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task 
ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.705998] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.706433] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1728.706433] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1729.209452] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1729.209708] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1729.712874] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.713293] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.713293] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.713455] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1729.714462] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9855e6-3152-4abf-b574-80ee9d3dccd5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.722869] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffe4983-abec-4d17-8e75-78fcbcd344f3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.737177] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cb866e50-5f6f-4ca6-9e74-756ba32b989c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.743920] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f41478-9048-4139-bdd9-2667fddb9ab7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.776744] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181229MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1729.776913] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.777134] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.794411] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1730.794691] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1730.807793] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0aefd5-a39e-4825-9d99-7e4091828444 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.815531] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9dde39-d45f-4801-bd39-e8cc976cb653 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.844476] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98c3d81-4bd6-4cd4-9a4f-ea88f4895b4a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.851200] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52088286-f3bd-4a27-91b0-29c5185320f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.864182] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1731.367137] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1731.368472] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1731.368657] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.706885] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1733.707308] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1742.708552] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.211657] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.211657] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1783.702660] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.706542] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.706542] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.706542] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1786.305067] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1786.807476] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Getting list of instances from cluster (obj){ [ 1786.807476] env[62875]: value = "domain-c8" [ 1786.807476] env[62875]: _type = "ClusterComputeResource" [ 1786.807476] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1786.808612] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5ada7d-cff7-4e49-8e4c-58995e5d2bb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.817181] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Got total of 0 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1787.219219] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1787.707186] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.706937] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.707209] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:10445}} [ 1788.707487] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1789.210637] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1789.210908] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.713656] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.714071] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.714071] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.714241] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1789.715132] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc2f60f-266e-49f0-b795-64ef2b00221a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.723123] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4f5537-4ce2-44c3-bdb5-f03c38ca75cf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.736605] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacc6a19-8163-428a-919f-8709aca18829 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.742784] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58468a9c-c636-48c8-8b31-0386304510af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.771557] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181199MB free_disk=175GB free_vcpus=48 
pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1789.771689] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.771874] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.889689] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1790.889962] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1790.904903] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1790.915656] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1790.915826] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1790.924538] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1790.938793] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] 
Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1790.950701] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c2eb69-59d1-495c-90ae-5509d7caf0b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.958744] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d17ed3e-3394-4d29-94a3-ea103f310eb5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.988165] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45502f1b-c48e-49e5-9f1d-52fd3a8ed376 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.995177] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adb1e62-b866-47cc-a192-ea4056a1d41a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.008735] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1791.512349] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1791.513598] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1791.513787] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.742s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.609653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "f27770de-40f5-4d5a-8819-a62d8f9a320a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.610867] env[62875]: DEBUG 
oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "f27770de-40f5-4d5a-8819-a62d8f9a320a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.114610] env[62875]: DEBUG nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1822.393235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Acquiring lock "f2891dfe-2464-4699-b4cd-54fa97cfb305" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.394431] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Lock "f2891dfe-2464-4699-b4cd-54fa97cfb305" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.559070] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "048df8ed-3be9-430f-8ade-c3cabcb0f16c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.559312] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "048df8ed-3be9-430f-8ade-c3cabcb0f16c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.679246] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.679246] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.680892] env[62875]: INFO nova.compute.claims [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1822.890941] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "15f747f0-43c0-4580-ab1c-28eadade4b82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.891047] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "15f747f0-43c0-4580-ab1c-28eadade4b82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.899481] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1822.949805] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "e41036b6-5ff4-4263-b319-9627b176b2dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.951139] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "e41036b6-5ff4-4263-b319-9627b176b2dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.984987] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "f013c0a3-fb9c-480b-b479-e81c1b1e8234" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.985414] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "f013c0a3-fb9c-480b-b479-e81c1b1e8234" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.062951] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1823.393547] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1823.434164] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.452658] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1823.489611] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1823.600136] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.811056] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b01872-183f-4bac-aa86-c0664102d28c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.821861] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d120c90d-f348-402b-8def-3b471d8ef5a9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.859725] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663d725b-b555-43c7-9950-71ab3b1ac7a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.867403] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72b07ce-01af-44df-a0ad-3be6ba1fc5fb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.882637] env[62875]: DEBUG nova.compute.provider_tree [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.915090] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.974380] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.015162] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.370732] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Acquiring lock "24f59fd1-63bf-4292-81f1-dc762510151f" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.371469] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Lock "24f59fd1-63bf-4292-81f1-dc762510151f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.392056] env[62875]: DEBUG nova.scheduler.client.report [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1824.753449] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Acquiring lock "ec33d0e1-4435-45c3-8ecf-33465cafda0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.753810] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Lock "ec33d0e1-4435-45c3-8ecf-33465cafda0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.876045] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1824.899813] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.900505] env[62875]: DEBUG nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1824.903107] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.469s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.906724] env[62875]: INFO nova.compute.claims [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1825.259196] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1825.333928] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Acquiring lock "d77191c5-8593-4730-8612-4877e059e7dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.334234] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Lock "d77191c5-8593-4730-8612-4877e059e7dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.345520] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Acquiring lock "c7f0f27e-5bb6-4306-ab9f-282578d1cfce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.345784] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Lock "c7f0f27e-5bb6-4306-ab9f-282578d1cfce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.408668] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.412657] env[62875]: DEBUG nova.compute.utils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1825.414838] env[62875]: DEBUG nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Not allocating networking since 'none' was specified. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1825.781782] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.842592] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1825.847763] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1825.917104] env[62875]: DEBUG nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1826.132639] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e15eca1-afab-4a0d-ac29-5c5bf27d1147 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.142831] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68582eda-4718-4ac3-b9a4-b4f1ae98aa31 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.180891] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bc918f-fef3-43ad-a41f-aab62b26365b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.188675] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f23a6d-b4bd-430e-94cf-d4fd63bfc2b5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.203681] env[62875]: DEBUG nova.compute.provider_tree [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1826.382423] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.401389] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.678412] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "495c8c6c-f90e-4a26-a248-0672e08df66a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.678657] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "495c8c6c-f90e-4a26-a248-0672e08df66a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.708019] env[62875]: DEBUG nova.scheduler.client.report [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192
tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1826.932914] env[62875]: DEBUG nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1827.215898] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.216589] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1827.220211] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.620s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.221994] env[62875]: INFO nova.compute.claims [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1827.606200] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1827.606200] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1827.606200] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1827.606357] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1827.606357] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1827.606357] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1827.606610] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1827.606923] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1827.607282] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1827.607646] env[62875]: DEBUG nova.virt.hardware [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1827.607952] env[62875]: DEBUG nova.virt.hardware 
[None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1827.608997] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21a5ff1-1172-4af7-a23a-932ad8388293 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.620187] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df03a86-36dc-4fd6-8deb-339ab65a4696 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.639113] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422bc724-87af-4fba-a6d0-53d73ca3fc6c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.664793] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1827.675492] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1827.675903] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c1c15eb-705c-435d-8f00-62abe35759b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.688944] env[62875]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1827.689158] env[62875]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62875) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1827.689548] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1827.689700] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Creating folder: Project (eb4b99ef84e44b08bc3c680efa8b5595). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1827.689962] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c6f3368-03a6-4a18-8ac9-bced57356e51 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.698856] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Created folder: Project (eb4b99ef84e44b08bc3c680efa8b5595) in parent group-v444854. [ 1827.699100] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Creating folder: Instances. Parent ref: group-v444858. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1827.699389] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bca89771-ee2f-452d-bc35-31ebf2949f85 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.708035] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Created folder: Instances in parent group-v444858. [ 1827.708327] env[62875]: DEBUG oslo.service.loopingcall [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1827.708544] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1827.708773] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a60e2604-7c12-430e-b954-0707efac46bc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.726790] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1827.726790] env[62875]: value = "task-2179919" [ 1827.726790] env[62875]: _type = "Task" [ 1827.726790] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.728444] env[62875]: DEBUG nova.compute.utils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1827.735015] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1827.735015] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1827.750681] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179919, 'name': CreateVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.904936] env[62875]: DEBUG nova.policy [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49f407ce8f174ee29aa3f8df24a633a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '340ab48399c7474592ade5da48f86591', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1828.240315] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179919, 'name': CreateVM_Task, 'duration_secs': 0.296433} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.240650] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1828.242203] env[62875]: DEBUG oslo_vmware.service [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b673e0-1628-4067-b7cd-de5b53c33e88 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.251981] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1828.259624] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.259886] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.260494] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1828.260764] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0ce752c-9796-4975-89ed-f03e4064d719 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.275149] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1828.275149] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525bb4aa-c25f-1856-12ca-eb5f6618d857" [ 1828.275149] env[62875]: _type = "Task" [ 1828.275149] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.283719] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525bb4aa-c25f-1856-12ca-eb5f6618d857, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.462110] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a40261-f4c9-4a4b-adb7-1d8f82c7d3d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.470080] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17765ef-eaad-46cb-b854-7787d5685e68 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.502437] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7c900a-bc7a-4330-a93b-9552d8b99c62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.508878] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3136395-031f-435b-b69a-99436ab673d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.527136] env[62875]: DEBUG nova.compute.provider_tree [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.672439] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Successfully created port: e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1828.788187] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.788727] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1828.789030] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.789236] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.789653] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1828.789917] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19d6531e-af53-46a0-bc47-bebb0ed279d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.810019] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1828.810019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1828.810830] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5713423-07d0-4f49-9249-48eeed72e0c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.824232] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0418a36-d06c-4435-bca7-fb201cf84334 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.831051] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1828.831051] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c9378-d290-a3b8-8d45-9138743b4890" [ 1828.831051] env[62875]: _type = "Task" [ 1828.831051] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.841407] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c9378-d290-a3b8-8d45-9138743b4890, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.031507] env[62875]: DEBUG nova.scheduler.client.report [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1829.266325] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1829.294076] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1829.294169] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1829.294419] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1829.294533] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1829.294876] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] 
Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1829.294876] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1829.295807] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1829.295807] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1829.295807] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1829.295807] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1829.295807] env[62875]: DEBUG nova.virt.hardware [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1829.298343] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b2d69c-4ef6-4447-8604-22ab4cf27133 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.311040] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fc087f-8e97-4e92-822e-2a2eeea16663 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.346359] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Preparing fetch location {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1829.346359] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Creating directory with path [datastore2] 
vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1829.346359] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2cf74da-36e4-4c44-9c3b-67b47c0d6292 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.364224] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Created directory with path [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1829.364448] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Fetch image to [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1829.364618] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Downloading image file data a9637bcc-4de8-4ea1-be59-4c697becf2a7 to [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk on the data store datastore2 {{(pid=62875) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1829.365450] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f74e5e-7e11-48f5-90db-f84f1c587985 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.377149] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4dabd4-1025-433f-9d59-2a02d96e7fa7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.389167] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b70be5-9b5e-478d-8cef-374d950be639 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.460640] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc87f64-70be-4199-824b-54b5c1777e5a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.470548] env[62875]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-01f07462-a162-4058-ac7c-1d261f2f54a5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.537675] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62875) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.538223] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1829.541372] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.626s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.547020] env[62875]: INFO nova.compute.claims [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1829.568776] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Downloading image file data a9637bcc-4de8-4ea1-be59-4c697becf2a7 to the data store datastore2 {{(pid=62875) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1829.662084] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62875) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1830.053873] env[62875]: DEBUG nova.compute.utils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1830.058969] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1830.058969] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1830.183773] env[62875]: DEBUG nova.policy [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '745f4fc89adc4edf8866270f43541b57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '645131c9b9d14a4ea8a70b26943ab45d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1830.434303] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Acquiring lock "cefcbb6a-378b-4927-b115-d648017502e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.434585] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Lock "cefcbb6a-378b-4927-b115-d648017502e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.488283] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Completed reading data from the image iterator. {{(pid=62875) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1830.488398] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
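The "Waiting for the task ... to complete", "progress is N%." and "completed successfully" records around here show oslo.vmware's task-polling loop: the client submits a vSphere task (here CopyVirtualDisk_Task), then periodically polls its state until it reports success or error. A minimal sketch of that loop; FakeTask is a hypothetical stand-in for the real vSphere task object, and the actual oslo_vmware.api.VMwareAPISession.wait_for_task reads TaskInfo state/progress over the SOAP API rather than a poll() method:

    import time

    class FakeTask:
        # Hypothetical stand-in for a vSphere task handle.
        def __init__(self):
            self.progress = 0
        def poll(self):
            self.progress = min(self.progress + 50, 100)
            state = 'success' if self.progress == 100 else 'running'
            return state, self.progress

    def wait_for_task(task, interval=0.5):
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            print('Task progress is %d%%.' % progress)
            if state == 'success':
                print('completed successfully (duration_secs: %.6f)'
                      % (time.monotonic() - start))
                return
            if state == 'error':
                raise RuntimeError('task failed')
            time.sleep(interval)

    wait_for_task(FakeTask())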
[ 1830.540662] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Downloaded image file data a9637bcc-4de8-4ea1-be59-4c697becf2a7 to vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk on the data store datastore2 {{(pid=62875) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1830.543554] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Caching image {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1830.543554] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Copying Virtual Disk [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk to [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1830.543777] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2311a92-fb1a-4a1f-9bbc-684ea5147656 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.554027] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1830.554027] env[62875]: value = "task-2179921" [ 1830.554027] env[62875]: _type = "Task" [ 1830.554027] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.560970] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1830.572262] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179921, 'name': CopyVirtualDisk_Task} progress is 0%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.781208] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c244d48-2f09-44f6-9842-548e0dc59d4b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.791873] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cdbd11-0d7c-428c-b000-05762c1cdb10 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.826601] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daed5cd6-e16f-46bb-99c4-77c5d0e3da9c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.835496] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a404af3b-0983-435f-b58c-aca89cffe4c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.850253] env[62875]: DEBUG nova.compute.provider_tree [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.999961] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Successfully created port: c38cd795-ea9d-4e01-a734-b80e9a180e5a {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1831.070441] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179921, 'name': CopyVirtualDisk_Task} progress is 27%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.354131] env[62875]: DEBUG nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1831.571531] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179921, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.574290] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1831.612140] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1831.612519] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1831.612709] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1831.612919] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1831.613104] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1831.613287] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1831.613535] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1831.613722] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1831.613917] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1831.614131] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1831.614308] env[62875]: DEBUG nova.virt.hardware [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1831.615297] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dc5a3c-4135-47f0-9b74-a7066a21a8db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.626911] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273767a3-cf07-46d3-b24d-5508ecaff8bc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.860714] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.861432] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1831.868031] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.893s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.870018] env[62875]: INFO nova.compute.claims [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1832.068097] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179921, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.048918} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.068097] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Copied Virtual Disk [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk to [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1832.068097] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Deleting the datastore file [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1832.069251] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea81b00f-b4ee-4a37-9a21-b186580e6dc6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.079481] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1832.079481] env[62875]: value = "task-2179922" [ 1832.079481] env[62875]: _type = "Task" [ 1832.079481] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.083019] env[62875]: ERROR nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information. 
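The task-2179922 exchange above is oslo.vmware's wait_for_task pattern: the driver starts a server-side task (here FileManager.DeleteDatastoreFile_Task), then polls it, logging "progress is N%" until the task reaches a terminal state. A minimal sketch of that poll loop, using a hypothetical FakeTask stand-in rather than the real vSphere task object:

    import time

    class FakeTask:
        # Stand-in for a vSphere task; not the real oslo.vmware object.
        def __init__(self, name, ticks=3):
            self.name, self.progress, self.state = name, 0, "running"
            self._step = 100 // ticks
            self.result, self.error = "ok", None

        def refresh(self):
            self.progress = min(100, self.progress + self._step)
            if self.progress >= 100:
                self.state = "success"

    def wait_for_task(task, poll_interval=0.1):
        # Poll until the task is done, mirroring the log's
        # "progress is 0%" ... "completed successfully" records.
        while True:
            if task.state == "success":
                return task.result
            if task.state == "error":
                raise RuntimeError(task.error)
            print(f"Task: {task.name} progress is {task.progress}%.")
            time.sleep(poll_interval)
            task.refresh()

    wait_for_task(FakeTask("task-2179922"))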
[ 1832.083019] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1832.083019] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1832.083019] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1832.083019] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1832.083019] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1832.083019] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1832.083019] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1832.083019] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1832.083019] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1832.083019] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1832.083019] env[62875]: ERROR nova.compute.manager raise self.value [ 1832.083019] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1832.083019] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1832.083019] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1832.083019] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1832.083439] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1832.083439] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1832.083439] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information. 
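Interleaved with this error report, the "compute_resources" records earlier in the section (Acquiring ... / acquired ... waited 7.893s / "released" ... held 2.319s) all come from one wrapper in oslo_concurrency.lockutils that times how long a caller waited for the lock and how long it held it. A rough equivalent, assuming a plain threading.Lock in place of oslo's named locks:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def logged_lock(lock, name, caller):
        # Log the wait time on acquire and the hold time on release,
        # in the same shape as the lockutils records in this log.
        print(f'Acquiring lock "{name}" by "{caller}"')
        start = time.monotonic()
        with lock:
            waited = time.monotonic() - start
            print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
            held_from = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_from
                print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    with logged_lock(threading.Lock(), "compute_resources", "instance_claim"):
        time.sleep(0.05)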
[ 1832.083439] env[62875]: ERROR nova.compute.manager [ 1832.083439] env[62875]: Traceback (most recent call last): [ 1832.083439] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1832.083439] env[62875]: listener.cb(fileno) [ 1832.083439] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1832.083439] env[62875]: result = function(*args, **kwargs) [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1832.083439] env[62875]: return func(*args, **kwargs) [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1832.083439] env[62875]: raise e [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1832.083439] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1832.083439] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1832.083439] env[62875]: with excutils.save_and_reraise_exception(): [ 1832.083439] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1832.083439] env[62875]: self.force_reraise() [ 1832.083439] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1832.083439] env[62875]: raise self.value [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1832.083439] env[62875]: updated_port = self._update_port( [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1832.083439] env[62875]: _ensure_no_port_binding_failure(port) [ 1832.083439] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1832.083439] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1832.084198] env[62875]: nova.exception.PortBindingFailed: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information. [ 1832.084198] env[62875]: Removing descriptor: 16 [ 1832.085832] env[62875]: ERROR nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information. 
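Every branch of the traceback above (and its per-instance copy that follows) terminates in _ensure_no_port_binding_failure. Neutron does not fail the port-update call itself when its back end cannot bind a port; it returns the port with binding:vif_type set to binding_failed, and Nova has to inspect the result. A condensed sketch of that check, simplified from nova/network/neutron.py with a local exception class standing in for nova.exception:

    VIF_TYPE_BINDING_FAILED = "binding_failed"

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # A "successful" Neutron response can still carry a failed
        # binding; surface it as an exception so the build aborts.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    try:
        ensure_no_port_binding_failure(
            {"id": "e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d",
             "binding:vif_type": VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)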
[ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Traceback (most recent call last): [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] yield resources [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self.driver.spawn(context, instance, image_meta, [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] vm_ref = self.build_virtual_machine(instance, [ 1832.085832] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] vif_infos = vmwarevif.get_vif_info(self._session, [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] for vif in network_info: [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return self._sync_wrapper(fn, *args, **kwargs) [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self.wait() [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self[:] = self._gt.wait() [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return self._exit_event.wait() [ 1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1832.086206] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] result = hub.switch() [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return self.greenlet.switch() [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] result = function(*args, **kwargs) [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return func(*args, **kwargs) [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] raise e [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] nwinfo = self.network_api.allocate_for_instance( [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] created_port_ids = self._update_ports_for_instance( [ 1832.086536] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] with excutils.save_and_reraise_exception(): [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self.force_reraise() [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] raise self.value [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] updated_port = self._update_port( [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] _ensure_no_port_binding_failure(port) [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] raise exception.PortBindingFailed(port_id=port['id']) [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] nova.exception.PortBindingFailed: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information. [ 1832.086954] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] [ 1832.087300] env[62875]: INFO nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Terminating instance [ 1832.102811] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.310815] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "9abafa57-9674-45f7-90cd-f80a8c80b567" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.311063] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "9abafa57-9674-45f7-90cd-f80a8c80b567" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.376752] env[62875]: DEBUG nova.compute.utils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1832.380779] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1832.380779] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1832.567846] env[62875]: DEBUG nova.policy [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4031022ab7a4346bdf343da18ff3676', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb011aca8c67403a963b93eb03f665ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1832.598945] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Acquiring lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.598945] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Acquired lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.598945] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1832.602740] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031673} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.602740] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1832.602740] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Moving file from [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553/a9637bcc-4de8-4ea1-be59-4c697becf2a7 to [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7. 
{{(pid=62875) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1832.602740] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-e5c81583-3016-435b-b3dc-027814c14158 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.610757] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1832.610757] env[62875]: value = "task-2179924" [ 1832.610757] env[62875]: _type = "Task" [ 1832.610757] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.628029] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179924, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.753554] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Acquiring lock "3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.753794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Lock "3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.881649] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1833.095137] env[62875]: DEBUG nova.compute.manager [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Received event network-changed-e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1833.095239] env[62875]: DEBUG nova.compute.manager [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Refreshing instance network info cache due to event network-changed-e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1833.095420] env[62875]: DEBUG oslo_concurrency.lockutils [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] Acquiring lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.125964] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179924, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.030382} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.126162] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] File moved {{(pid=62875) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1833.126360] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Cleaning up location [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1833.126521] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Deleting the datastore file [datastore2] vmware_temp/e8d674c8-9b60-4931-be16-64fbc048f553 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1833.126774] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00525bc1-6163-4f9a-b539-3e2096bf1484 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.135938] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1833.135938] env[62875]: value = "task-2179925" [ 1833.135938] env[62875]: _type = "Task" [ 1833.135938] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.149715] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.150717] env[62875]: ERROR nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. 
[ 1833.150717] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1833.150717] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1833.150717] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1833.150717] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1833.150717] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1833.150717] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1833.150717] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1833.150717] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1833.150717] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1833.150717] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1833.150717] env[62875]: ERROR nova.compute.manager raise self.value [ 1833.150717] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1833.150717] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1833.150717] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1833.150717] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1833.151163] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1833.151163] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1833.151163] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. 
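Both port-binding failures route through oslo_utils.excutils.save_and_reraise_exception, whose frames (__exit__, force_reraise, "raise self.value") recur in every dump in this section. It is a context manager used inside an except block: it captures the in-flight exception, lets cleanup code run, then re-raises the original unless the cleanup itself raised. A minimal sketch of the same behavior, without the oslo dependency:

    import sys

    class save_and_reraise_exception:
        # Capture the exception currently being handled; re-raise it on
        # exit unless the with-body raised something new.
        def __enter__(self):
            self.value = sys.exc_info()[1]
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_val is None and self.value is not None:
                raise self.value      # the "raise self.value" frame above
            return False              # a new exception from the body wins

    try:
        try:
            raise ValueError("binding failed")
        except ValueError:
            with save_and_reraise_exception():
                print("cleanup runs before the original error is re-raised")
    except ValueError as exc:
        print("re-raised:", exc)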
[ 1833.151163] env[62875]: ERROR nova.compute.manager [ 1833.151163] env[62875]: Traceback (most recent call last): [ 1833.151163] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1833.151163] env[62875]: listener.cb(fileno) [ 1833.151163] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1833.151163] env[62875]: result = function(*args, **kwargs) [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1833.151163] env[62875]: return func(*args, **kwargs) [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1833.151163] env[62875]: raise e [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1833.151163] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1833.151163] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1833.151163] env[62875]: with excutils.save_and_reraise_exception(): [ 1833.151163] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1833.151163] env[62875]: self.force_reraise() [ 1833.151163] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1833.151163] env[62875]: raise self.value [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1833.151163] env[62875]: updated_port = self._update_port( [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1833.151163] env[62875]: _ensure_no_port_binding_failure(port) [ 1833.151163] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1833.151163] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1833.151851] env[62875]: nova.exception.PortBindingFailed: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. [ 1833.151851] env[62875]: Removing descriptor: 19 [ 1833.151851] env[62875]: ERROR nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. 
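The "failed network setup after 1 attempt(s)" wording, repeated in the per-instance traceback below, reflects a retry loop: _allocate_network_async re-invokes allocate_for_instance up to a configured attempt count (CONF.network_allocate_retries in Nova) before re-raising. A rough sketch of that shape, with a hypothetical allocate callable and a plain sleep for the back-off:

    import time

    def allocate_network_with_retries(allocate, attempts=1, delay=0.1):
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception as exc:
                if attempt == attempts:
                    # Matches the ERROR records above on the final try.
                    print(f"Instance failed network setup after "
                          f"{attempt} attempt(s): {exc}")
                    raise
                time.sleep(delay)  # back off, then retry

    def failing_allocate():
        raise RuntimeError("binding failed")

    try:
        allocate_network_with_retries(failing_allocate)
    except RuntimeError:
        pass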
[ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Traceback (most recent call last): [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] yield resources [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self.driver.spawn(context, instance, image_meta, [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1833.151851] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] vm_ref = self.build_virtual_machine(instance, [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] vif_infos = vmwarevif.get_vif_info(self._session, [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] for vif in network_info: [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return self._sync_wrapper(fn, *args, **kwargs) [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self.wait() [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self[:] = self._gt.wait() [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return self._exit_event.wait() [ 1833.152508] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] result = hub.switch() [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return self.greenlet.switch() [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] result = function(*args, **kwargs) [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return func(*args, **kwargs) [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] raise e [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] nwinfo = self.network_api.allocate_for_instance( [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1833.152933] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] created_port_ids = self._update_ports_for_instance( [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] with excutils.save_and_reraise_exception(): [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self.force_reraise() [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] raise self.value [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] updated_port = self._update_port( [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] _ensure_no_port_binding_failure(port) [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1833.153257] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] raise exception.PortBindingFailed(port_id=port['id']) [ 1833.153553] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] nova.exception.PortBindingFailed: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. [ 1833.153553] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] [ 1833.153553] env[62875]: INFO nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Terminating instance [ 1833.157106] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01939594-949f-47a8-901e-c46689ac85ca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.169112] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93404a40-763c-4107-884b-6e598472038c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.208651] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1833.211122] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d434c8-ea7a-455b-a743-e33d0ee48fe3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.220371] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f005de3-d6b5-4365-bd88-1a1fea30b3c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.235550] env[62875]: DEBUG nova.compute.provider_tree [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1833.611131] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.647166] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029453} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.647428] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1833.648204] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b07e3c49-58c2-4ca3-9209-e1904afc2d62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.655163] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1833.655163] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522c3468-2b47-8ba7-50bc-9d4b129f932a" [ 1833.655163] env[62875]: _type = "Task" [ 1833.655163] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.661263] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.661476] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquired lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.662077] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1833.666666] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522c3468-2b47-8ba7-50bc-9d4b129f932a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.741585] env[62875]: DEBUG nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1833.896669] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1833.921504] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1833.921751] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.921910] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1833.922105] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.922255] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1833.922400] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1833.922606] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1833.922766] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1833.922931] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1833.923190] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1833.923414] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1833.924312] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adbea21-c632-4434-8ff2-fb9c7e4fecf7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.933305] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a18a48-91a7-4a73-8f9c-b6a506eae3cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.033753] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Successfully created port: 73ed3ab8-2695-4597-9f6d-15d35a9ebc69 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1834.114909] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Releasing lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.114909] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1834.114909] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1834.114909] env[62875]: DEBUG oslo_concurrency.lockutils [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] Acquired lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.115069] env[62875]: DEBUG nova.network.neutron [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Refreshing network info cache for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1834.116410] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d93a8a5-b4ca-494b-b81e-44e2e3e2e26e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.133095] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4cfdeb-a9ac-4c7b-8fa8-60a293d82966 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.166142] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f2891dfe-2464-4699-b4cd-54fa97cfb305 could not be found. [ 1834.166384] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1834.166998] env[62875]: INFO nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1834.167277] env[62875]: DEBUG oslo.service.loopingcall [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1834.170857] env[62875]: DEBUG nova.compute.manager [-] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1834.170947] env[62875]: DEBUG nova.network.neutron [-] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1834.182204] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522c3468-2b47-8ba7-50bc-9d4b129f932a, 'name': SearchDatastore_Task, 'duration_secs': 0.00912} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.182204] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.182204] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] f27770de-40f5-4d5a-8819-a62d8f9a320a/f27770de-40f5-4d5a-8819-a62d8f9a320a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1834.182437] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb5dd84a-e324-4430-9ce7-595c6194eb18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.193398] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1834.193398] env[62875]: value = "task-2179926" [ 1834.193398] env[62875]: _type = "Task" [ 1834.193398] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.209110] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179926, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.223923] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1834.246291] env[62875]: DEBUG nova.network.neutron [-] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1834.248266] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.248751] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1834.252104] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.237s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.252880] env[62875]: INFO nova.compute.claims [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1834.451637] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.707677] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179926, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491417} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.711723] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] f27770de-40f5-4d5a-8819-a62d8f9a320a/f27770de-40f5-4d5a-8819-a62d8f9a320a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1834.711723] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1834.711723] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64b3feb8-209d-40fc-9232-784b519941ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.721939] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1834.721939] env[62875]: value = "task-2179928" [ 1834.721939] env[62875]: _type = "Task" [ 1834.721939] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.722986] env[62875]: DEBUG nova.network.neutron [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1834.737797] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179928, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.754202] env[62875]: DEBUG nova.network.neutron [-] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.757221] env[62875]: DEBUG nova.compute.utils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1834.765632] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1834.765966] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1834.871708] env[62875]: DEBUG nova.policy [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4031022ab7a4346bdf343da18ff3676', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb011aca8c67403a963b93eb03f665ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1834.878940] env[62875]: DEBUG nova.compute.manager [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Received event network-changed-c38cd795-ea9d-4e01-a734-b80e9a180e5a {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1834.879355] env[62875]: DEBUG nova.compute.manager [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Refreshing instance network info cache due to event network-changed-c38cd795-ea9d-4e01-a734-b80e9a180e5a. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1834.879970] env[62875]: DEBUG oslo_concurrency.lockutils [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] Acquiring lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.964218] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Releasing lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.964218] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1834.964549] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1834.965130] env[62875]: DEBUG oslo_concurrency.lockutils [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] Acquired lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.965459] env[62875]: DEBUG nova.network.neutron [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Refreshing network info cache for port c38cd795-ea9d-4e01-a734-b80e9a180e5a {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1834.967203] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-610a0c1f-9a4f-44eb-806e-3a537b70b673 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.978247] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41168b52-646c-40ff-b0ad-3bd850b0beff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.018342] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 048df8ed-3be9-430f-8ade-c3cabcb0f16c could not be found. [ 1835.018342] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1835.018342] env[62875]: INFO nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1835.018342] env[62875]: DEBUG oslo.service.loopingcall [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.018342] env[62875]: DEBUG nova.compute.manager [-] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1835.018342] env[62875]: DEBUG nova.network.neutron [-] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1835.059144] env[62875]: DEBUG nova.network.neutron [-] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1835.064227] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Acquiring lock "79d018cc-2400-4925-a09f-e0aaaa8b22db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.064770] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Lock "79d018cc-2400-4925-a09f-e0aaaa8b22db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.113796] env[62875]: DEBUG nova.network.neutron [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.236895] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179928, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070795} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.237201] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1835.239503] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9daff0b-d981-4e32-8a6c-5b18f9e57dfe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.270912] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] f27770de-40f5-4d5a-8819-a62d8f9a320a/f27770de-40f5-4d5a-8819-a62d8f9a320a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1835.271519] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1835.275019] env[62875]: INFO nova.compute.manager [-] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Took 1.10 seconds to deallocate network for instance. [ 1835.275019] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7943c4ae-66b6-4c45-b2e8-8989b36c4df9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.294706] env[62875]: DEBUG nova.compute.claims [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1835.294910] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.303962] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1835.303962] env[62875]: value = "task-2179929" [ 1835.303962] env[62875]: _type = "Task" [ 1835.303962] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.314776] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179929, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.516354] env[62875]: DEBUG nova.network.neutron [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1835.548918] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ea2da2-53d5-4e78-a8fb-2b05d878c6d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.564275] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa52e8e-18bc-4357-bd6a-f1e4a9fe16e7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.566944] env[62875]: DEBUG nova.network.neutron [-] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.598544] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812c60f8-1a76-4157-b9e7-da1223fd1d9f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.608468] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c51aec-fc28-4bb7-9ee9-22a298b04c8d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.620788] env[62875]: DEBUG oslo_concurrency.lockutils [req-83ed0878-321e-434e-a59c-b9d2273c7757 req-b82c3924-ea2e-477d-9456-48bffeff4dd0 service nova] Releasing lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.632692] env[62875]: DEBUG nova.compute.provider_tree [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.729323] env[62875]: DEBUG nova.network.neutron [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.818053] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179929, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.895480] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Successfully created port: d3af8b79-1c53-42e6-a5b6-9d0429e0773f {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1836.073134] env[62875]: INFO nova.compute.manager [-] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Took 1.05 seconds to deallocate network for instance. [ 1836.074829] env[62875]: DEBUG nova.compute.claims [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1836.075015] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.136300] env[62875]: DEBUG nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1836.218015] env[62875]: DEBUG nova.compute.manager [req-c362ace3-1e4c-43bf-96d4-86c44772bd6d req-e3f6d775-60bb-4ed0-87fd-b71cdd1c6945 service nova] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Received event network-vif-deleted-e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1836.234590] env[62875]: DEBUG oslo_concurrency.lockutils [req-6da24200-bad0-4412-980a-2a6edb78cbfe req-b68812e3-e743-4e5b-a220-55faf6fb375c service nova] Releasing lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.295921] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1836.315690] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179929, 'name': ReconfigVM_Task, 'duration_secs': 0.664343} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.316018] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Reconfigured VM instance instance-00000001 to attach disk [datastore2] f27770de-40f5-4d5a-8819-a62d8f9a320a/f27770de-40f5-4d5a-8819-a62d8f9a320a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1836.316725] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbdcd938-97d6-459d-8b84-b8fdb0acdef8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.324434] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1836.324434] env[62875]: value = "task-2179930" [ 1836.324434] env[62875]: _type = "Task" [ 1836.324434] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.334809] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1836.334968] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1836.335197] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1836.335487] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 
tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1836.335751] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1836.336024] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1836.336174] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1836.336398] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1836.336773] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1836.336872] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1836.337104] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1836.338062] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660b0423-b62f-4111-a04c-f33b62d74e41 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.350018] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7e77a9-1ef8-4f6f-9ffa-eb7086b7a6cf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.354853] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179930, 'name': Rename_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.644994] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.645548] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1836.650381] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.242s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.651773] env[62875]: INFO nova.compute.claims [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1836.839925] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179930, 'name': Rename_Task, 'duration_secs': 0.154407} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.840421] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1836.840702] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d71d1827-d701-4e2f-a0d8-a92d137ad000 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.850523] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1836.850523] env[62875]: value = "task-2179932" [ 1836.850523] env[62875]: _type = "Task" [ 1836.850523] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.867027] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179932, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.158682] env[62875]: DEBUG nova.compute.utils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1837.169111] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1837.169928] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1837.349735] env[62875]: DEBUG nova.policy [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4031022ab7a4346bdf343da18ff3676', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb011aca8c67403a963b93eb03f665ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1837.364490] env[62875]: DEBUG oslo_vmware.api [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179932, 'name': PowerOnVM_Task, 'duration_secs': 0.49901} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.364751] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1837.364947] env[62875]: INFO nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Took 10.43 seconds to spawn the instance on the hypervisor. 
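The f27770de spawn traced above follows the usual vmwareapi pattern: each vCenter operation (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task reference, and the wait_for_task/_poll_task pair in oslo_vmware.api polls it until vCenter reports success, logging "progress is N%" along the way. Below is a minimal sketch of that polling loop; get_task_info is a hypothetical accessor standing in for the PropertyCollector.RetrievePropertiesEx round-trips seen in the log, and the real loop in oslo_vmware.api is driven by a looping call rather than a bare sleep.

    import time

    # TaskInfo states as vSphere reports them; the log's "completed
    # successfully" corresponds to the 'success' state.
    RUNNING, SUCCESS, ERROR = "running", "success", "error"

    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        # task_ref is an opaque id such as 'task-2179932'; get_task_info
        # is a hypothetical callable, not an oslo.vmware API.
        while True:
            info = get_task_info(task_ref)     # one property-collector fetch
            if info["state"] == SUCCESS:
                return info                    # caller logs duration_secs
            if info["state"] == ERROR:
                raise RuntimeError(info["error"])  # surfaced as a task fault
            # mirrors "Task: {'id': ..., 'name': ...} progress is N%."
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)
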
[ 1837.365207] env[62875]: DEBUG nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1837.366030] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d066baf1-3e83-4b03-8526-408ba176919e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.673299] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1837.901736] env[62875]: INFO nova.compute.manager [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Took 15.28 seconds to build instance. [ 1837.972559] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db2d0aa-0448-43c6-bc6e-41ad086e4824 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.980812] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a53b39-23c9-4680-894a-ddb1adf05c90 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.018652] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9af2c6e-4436-42a0-aa0b-b64f2418b65e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.027153] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eafe11c-1761-4896-98a4-1e99c2961301 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.041260] env[62875]: DEBUG nova.compute.provider_tree [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.114690] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. 
[ 1838.114690] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1838.114690] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1838.114690] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1838.114690] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1838.114690] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1838.114690] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1838.114690] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1838.114690] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1838.114690] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1838.114690] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1838.114690] env[62875]: ERROR nova.compute.manager raise self.value [ 1838.114690] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1838.114690] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1838.114690] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1838.114690] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1838.115133] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1838.115133] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1838.115133] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. 
[ 1838.115133] env[62875]: ERROR nova.compute.manager [ 1838.115133] env[62875]: Traceback (most recent call last): [ 1838.115133] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1838.115133] env[62875]: listener.cb(fileno) [ 1838.115133] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1838.115133] env[62875]: result = function(*args, **kwargs) [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1838.115133] env[62875]: return func(*args, **kwargs) [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1838.115133] env[62875]: raise e [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1838.115133] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1838.115133] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1838.115133] env[62875]: with excutils.save_and_reraise_exception(): [ 1838.115133] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1838.115133] env[62875]: self.force_reraise() [ 1838.115133] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1838.115133] env[62875]: raise self.value [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1838.115133] env[62875]: updated_port = self._update_port( [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1838.115133] env[62875]: _ensure_no_port_binding_failure(port) [ 1838.115133] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1838.115133] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1838.116353] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. [ 1838.116353] env[62875]: Removing descriptor: 16 [ 1838.116353] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. 
[ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Traceback (most recent call last): [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] yield resources [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self.driver.spawn(context, instance, image_meta, [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1838.116353] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] vm_ref = self.build_virtual_machine(instance, [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] vif_infos = vmwarevif.get_vif_info(self._session, [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] for vif in network_info: [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return self._sync_wrapper(fn, *args, **kwargs) [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self.wait() [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self[:] = self._gt.wait() [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return self._exit_event.wait() [ 1838.116674] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] result = hub.switch() [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return self.greenlet.switch() [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] result = function(*args, **kwargs) [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return func(*args, **kwargs) [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] raise e [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] nwinfo = self.network_api.allocate_for_instance( [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1838.116994] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] created_port_ids = self._update_ports_for_instance( [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] with excutils.save_and_reraise_exception(): [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self.force_reraise() [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] raise self.value [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] updated_port = self._update_port( [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] _ensure_no_port_binding_failure(port) [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1838.117342] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] raise exception.PortBindingFailed(port_id=port['id']) [ 1838.117685] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] nova.exception.PortBindingFailed: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. [ 1838.117685] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] [ 1838.117685] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Terminating instance [ 1838.285361] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. [ 1838.285361] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1838.285361] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1838.285361] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1838.285361] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1838.285361] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1838.285361] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1838.285361] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1838.285361] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1838.285361] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1838.285361] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1838.285361] env[62875]: ERROR nova.compute.manager raise self.value [ 1838.285361] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1838.285361] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1838.285361] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1838.285361] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1838.285959] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1838.285959] env[62875]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 1838.285959] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. [ 1838.285959] env[62875]: ERROR nova.compute.manager [ 1838.285959] env[62875]: Traceback (most recent call last): [ 1838.285959] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1838.285959] env[62875]: listener.cb(fileno) [ 1838.285959] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1838.285959] env[62875]: result = function(*args, **kwargs) [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1838.285959] env[62875]: return func(*args, **kwargs) [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1838.285959] env[62875]: raise e [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1838.285959] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1838.285959] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1838.285959] env[62875]: with excutils.save_and_reraise_exception(): [ 1838.285959] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1838.285959] env[62875]: self.force_reraise() [ 1838.285959] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1838.285959] env[62875]: raise self.value [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1838.285959] env[62875]: updated_port = self._update_port( [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1838.285959] env[62875]: _ensure_no_port_binding_failure(port) [ 1838.285959] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1838.285959] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1838.287901] env[62875]: nova.exception.PortBindingFailed: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. [ 1838.287901] env[62875]: Removing descriptor: 19 [ 1838.287901] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. 
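The two tracebacks above walk an identical code path: _update_ports_for_instance wraps the Neutron port update in oslo.utils' save_and_reraise_exception, and _ensure_no_port_binding_failure raises PortBindingFailed as soon as Neutron reports the binding as failed. A minimal sketch of that pattern, assuming a port dict shaped like a Neutron API response; the exception class and the client argument are illustrative stand-ins, not Nova's actual definitions:

from oslo_utils import excutils


class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs '
            'for more information.' % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron marks a port whose binding could not be completed with
    # binding:vif_type == 'binding_failed'; treat that as a hard error.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def update_port(client, port_id, port_req_body):
    # save_and_reraise_exception logs and re-raises the original error on
    # the way out of the with-block, which is why the same
    # PortBindingFailed shows up at every frame of the traceback above.
    with excutils.save_and_reraise_exception():
        port = client.update_port(port_id, port_req_body)['port']
        ensure_no_port_binding_failure(port)
    return port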
[ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Traceback (most recent call last): [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] yield resources [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self.driver.spawn(context, instance, image_meta, [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1838.287901] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] vm_ref = self.build_virtual_machine(instance, [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] for vif in network_info: [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return self._sync_wrapper(fn, *args, **kwargs) [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self.wait() [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self[:] = self._gt.wait() [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return self._exit_event.wait() [ 1838.288235] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] result = hub.switch() [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return self.greenlet.switch() [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] result = function(*args, **kwargs) [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return func(*args, **kwargs) [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] raise e [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] nwinfo = self.network_api.allocate_for_instance( [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1838.288556] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] created_port_ids = self._update_ports_for_instance( [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] with excutils.save_and_reraise_exception(): [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self.force_reraise() [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] raise self.value [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] updated_port = self._update_port( [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] _ensure_no_port_binding_failure(port) [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1838.288964] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] raise exception.PortBindingFailed(port_id=port['id']) [ 1838.289310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] nova.exception.PortBindingFailed: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. [ 1838.289310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] [ 1838.289310] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Terminating instance [ 1838.374220] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Successfully created port: 454ce4f2-c67c-473c-a97b-86de05e81627 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1838.404105] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9ded3587-c63d-4cff-bc26-241d04ad9c5c tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "f27770de-40f5-4d5a-8819-a62d8f9a320a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.793s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.546443] env[62875]: DEBUG nova.scheduler.client.report [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1838.597388] env[62875]: DEBUG nova.compute.manager [req-94d5bec9-3927-4822-9fc6-8453f8e2d9a6 req-c86f9dc2-eda2-49bf-8f9f-8276e59d71f0 service nova] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Received event network-vif-deleted-c38cd795-ea9d-4e01-a734-b80e9a180e5a {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1838.622368] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.622569] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquired lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.622865] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1838.684519] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1838.725341] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1838.725582] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1838.725792] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1838.726015] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1838.732901] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1838.732901] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1838.732901] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1838.733126] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1838.733161] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1838.733334] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1838.733781] env[62875]: DEBUG nova.virt.hardware [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1838.735159] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90b395e-49fc-4bef-b519-fedeed997cee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.746647] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45163ffe-f222-4ce8-949e-c694bc073184 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.796228] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.797732] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquired lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.797732] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1838.912030] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1839.054394] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.055220] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1839.057584] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.276s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.059367] env[62875]: INFO nova.compute.claims [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1839.064269] env[62875]: DEBUG nova.compute.manager [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Received event network-changed-73ed3ab8-2695-4597-9f6d-15d35a9ebc69 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1839.064523] env[62875]: DEBUG nova.compute.manager [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Refreshing instance network info cache due to event network-changed-73ed3ab8-2695-4597-9f6d-15d35a9ebc69. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1839.064589] env[62875]: DEBUG oslo_concurrency.lockutils [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] Acquiring lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.171510] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1839.354455] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.451149] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.512766] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1839.564746] env[62875]: DEBUG nova.compute.utils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1839.569614] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1839.569614] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1839.709086] env[62875]: DEBUG nova.policy [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '408a9aec437748e9856bc5b42410ca1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa6cbebe3e09457d97c03b4fb03c3825', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1839.782321] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.858457] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Releasing lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.858998] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1839.859227] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1839.859544] env[62875]: DEBUG oslo_concurrency.lockutils [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] Acquired lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.859950] env[62875]: DEBUG nova.network.neutron [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Refreshing network info cache for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1839.865352] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89bb5c39-ac5a-467b-8bb1-5ca04f3f46dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.880082] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4826e705-e062-4726-adc2-74a3ae767a5a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.920380] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 15f747f0-43c0-4580-ab1c-28eadade4b82 could not be found. [ 1839.921233] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1839.921326] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1839.921817] env[62875]: DEBUG oslo.service.loopingcall [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1839.922047] env[62875]: DEBUG nova.compute.manager [-] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1839.922184] env[62875]: DEBUG nova.network.neutron [-] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1840.009726] env[62875]: DEBUG nova.network.neutron [-] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1840.070931] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1840.285420] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Releasing lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.288846] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1840.288846] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1840.288846] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-204adbe9-909b-4353-b116-3aa25eb44d8d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.305026] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd550eb-86f7-49dc-b01d-6ef65aea58ec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.338030] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e41036b6-5ff4-4263-b319-9627b176b2dc could not be found. 
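The WARNING records above show the other half of the failure handling: the spawn never created a VM, so when the terminate path asks vCenter for the instance, the lookup raises InstanceNotFound, the driver logs the warning and proceeds as if the destroy succeeded, letting network deallocation and claim cleanup still run. A hedged sketch of that tolerate-missing-backend shape; find_vm_ref and destroy_vm are injected stand-ins for the vCenter session calls, not the real vmops API:

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    # Stand-in for nova.exception.InstanceNotFound.
    pass


def destroy(instance_uuid, find_vm_ref, destroy_vm):
    # Destroy a VM, treating 'never existed on the backend' as success.
    try:
        vm_ref = find_vm_ref(instance_uuid)
        destroy_vm(vm_ref)
    except InstanceNotFound:
        # A spawn that failed before the VM was created (the port
        # binding failures above) leaves nothing to delete; warn and
        # carry on so network and claim cleanup still happen.
        LOG.warning('Instance does not exist on backend: %s',
                    instance_uuid)
    LOG.debug('Instance destroyed')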
[ 1840.338030] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1840.338030] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1840.338030] env[62875]: DEBUG oslo.service.loopingcall [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.340702] env[62875]: DEBUG nova.compute.manager [-] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1840.340702] env[62875]: DEBUG nova.network.neutron [-] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1840.381405] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858bd1b3-3af6-4b43-8b53-137813737b04 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.390666] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32b75da-a5f0-4791-8c3e-a10ba1333f4d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.425716] env[62875]: DEBUG nova.network.neutron [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1840.428089] env[62875]: DEBUG nova.network.neutron [-] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1840.430315] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10eb9ac3-9347-4e9e-b84f-97efa2041fdb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.440783] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336ea3ed-5e67-45ab-8f2f-8da9a477490e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.460290] env[62875]: DEBUG nova.compute.provider_tree [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1840.513904] env[62875]: DEBUG nova.network.neutron [-] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.708672] env[62875]: DEBUG nova.network.neutron [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.724189] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Successfully created port: e0a41d02-7676-444c-89da-b83506568fa2 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1840.847729] env[62875]: DEBUG nova.compute.manager [None req-e096ce9b-eef9-4a75-9b09-50361c96b5d1 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1840.848850] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c49b4c-ec02-4f0a-b740-35279bfb077a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.936106] env[62875]: DEBUG nova.network.neutron [-] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.965134] env[62875]: DEBUG nova.scheduler.client.report [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1841.016647] env[62875]: INFO nova.compute.manager [-] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Took 1.09 seconds to deallocate network for instance. [ 1841.019297] env[62875]: DEBUG nova.compute.claims [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1841.019569] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.086426] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1841.121338] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1841.121592] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1841.121747] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1841.121929] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1841.122091] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1841.123031] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1841.123031] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1841.123031] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1841.123031] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1841.123466] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1841.123709] env[62875]: DEBUG nova.virt.hardware [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1841.124638] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e068785-d76c-4033-b9b0-54e057b1db2e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.134460] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb7bfc5-bf96-4091-8b50-cdd760b5d63d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.211974] env[62875]: DEBUG oslo_concurrency.lockutils [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] Releasing lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.212565] env[62875]: DEBUG nova.compute.manager [req-101bd348-7c8a-4ca6-bd94-d70a840ed76f req-c1d6d33a-8926-40a3-a447-14419bc88017 service nova] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Received event network-vif-deleted-73ed3ab8-2695-4597-9f6d-15d35a9ebc69 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1841.366462] env[62875]: INFO nova.compute.manager [None req-e096ce9b-eef9-4a75-9b09-50361c96b5d1 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] instance snapshotting [ 1841.367118] env[62875]: DEBUG nova.objects.instance [None req-e096ce9b-eef9-4a75-9b09-50361c96b5d1 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lazy-loading 'flavor' on Instance uuid f27770de-40f5-4d5a-8819-a62d8f9a320a {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.438028] env[62875]: INFO nova.compute.manager [-] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Took 1.10 seconds to deallocate network for instance. [ 1841.442566] env[62875]: DEBUG nova.compute.claims [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1841.442566] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.470077] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.470428] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1841.474776] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.094s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.475064] env[62875]: INFO nova.compute.claims [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1841.878480] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3026f6-ffb0-4f2f-a357-25f8d594a2ac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.901255] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca30a0b-5aed-4d4f-8f82-01ce263040d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.982446] env[62875]: DEBUG nova.compute.utils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1841.984500] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1841.984775] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1842.169384] env[62875]: DEBUG nova.policy [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b588437b4d245b5aec908f9e5a6aa4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff3480f0d3314216a8f3f0c126761836', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1842.187174] env[62875]: DEBUG nova.compute.manager [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Received event network-changed-d3af8b79-1c53-42e6-a5b6-9d0429e0773f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1842.187408] env[62875]: DEBUG nova.compute.manager [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Refreshing instance network info cache due to event network-changed-d3af8b79-1c53-42e6-a5b6-9d0429e0773f. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1842.187599] env[62875]: DEBUG oslo_concurrency.lockutils [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] Acquiring lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.187766] env[62875]: DEBUG oslo_concurrency.lockutils [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] Acquired lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.187925] env[62875]: DEBUG nova.network.neutron [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Refreshing network info cache for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1842.463186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "f27770de-40f5-4d5a-8819-a62d8f9a320a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.463475] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "f27770de-40f5-4d5a-8819-a62d8f9a320a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.464014] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "f27770de-40f5-4d5a-8819-a62d8f9a320a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.464014] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "f27770de-40f5-4d5a-8819-a62d8f9a320a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.464133] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "f27770de-40f5-4d5a-8819-a62d8f9a320a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.466754] env[62875]: INFO nova.compute.manager [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 
tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Terminating instance [ 1842.484770] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1842.755020] env[62875]: DEBUG nova.network.neutron [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1842.782401] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0f296f-f449-4106-9ef6-101c3f7ae156 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.791547] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad098ab-ccde-4151-8594-d6b67af0251d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.834037] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490251b7-cf8a-428c-b64d-78daa748646d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.842563] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec77bec-fac9-4bab-be58-2a596e536ba3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.861310] env[62875]: DEBUG nova.compute.provider_tree [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.897608] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. 
[ 1842.897608] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1842.897608] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1842.897608] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1842.897608] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1842.897608] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1842.897608] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1842.897608] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1842.897608] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.897608] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1842.897608] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.897608] env[62875]: ERROR nova.compute.manager raise self.value [ 1842.897608] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1842.897608] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1842.897608] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1842.897608] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1842.898217] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1842.898217] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1842.898217] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. 
[ 1842.898217] env[62875]: ERROR nova.compute.manager [ 1842.898217] env[62875]: Traceback (most recent call last): [ 1842.898217] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1842.898217] env[62875]: listener.cb(fileno) [ 1842.898217] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1842.898217] env[62875]: result = function(*args, **kwargs) [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1842.898217] env[62875]: return func(*args, **kwargs) [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1842.898217] env[62875]: raise e [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1842.898217] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1842.898217] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1842.898217] env[62875]: with excutils.save_and_reraise_exception(): [ 1842.898217] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.898217] env[62875]: self.force_reraise() [ 1842.898217] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.898217] env[62875]: raise self.value [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1842.898217] env[62875]: updated_port = self._update_port( [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1842.898217] env[62875]: _ensure_no_port_binding_failure(port) [ 1842.898217] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1842.898217] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1842.899876] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. [ 1842.899876] env[62875]: Removing descriptor: 18 [ 1842.899876] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. 
[ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Traceback (most recent call last): [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] yield resources [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self.driver.spawn(context, instance, image_meta, [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1842.899876] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] vm_ref = self.build_virtual_machine(instance, [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] vif_infos = vmwarevif.get_vif_info(self._session, [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] for vif in network_info: [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return self._sync_wrapper(fn, *args, **kwargs) [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self.wait() [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self[:] = self._gt.wait() [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return self._exit_event.wait() [ 1842.900232] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] result = hub.switch() [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return self.greenlet.switch() [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] result = function(*args, **kwargs) [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return func(*args, **kwargs) [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] raise e [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] nwinfo = self.network_api.allocate_for_instance( [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1842.900558] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] created_port_ids = self._update_ports_for_instance( [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] with excutils.save_and_reraise_exception(): [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self.force_reraise() [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] raise self.value [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] updated_port = self._update_port( [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] _ensure_no_port_binding_failure(port) [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1842.900879] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] raise exception.PortBindingFailed(port_id=port['id']) [ 1842.901182] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] nova.exception.PortBindingFailed: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. [ 1842.901182] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] [ 1842.901182] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Terminating instance [ 1842.971395] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "refresh_cache-f27770de-40f5-4d5a-8819-a62d8f9a320a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.971617] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquired lock "refresh_cache-f27770de-40f5-4d5a-8819-a62d8f9a320a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.971768] env[62875]: DEBUG nova.network.neutron [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1843.040793] env[62875]: DEBUG nova.network.neutron [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.363959] env[62875]: DEBUG nova.scheduler.client.report [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1843.406403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 
tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.407635] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquired lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.407635] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1843.424232] env[62875]: DEBUG nova.compute.manager [None req-e096ce9b-eef9-4a75-9b09-50361c96b5d1 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Instance disappeared during snapshot {{(pid=62875) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1843.493820] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1843.526234] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1843.526380] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1843.526459] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1843.526643] 
env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1843.527049] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1843.527049] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1843.528118] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1843.528312] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1843.528984] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1843.528984] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1843.528984] env[62875]: DEBUG nova.virt.hardware [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1843.530030] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b9f964-b2f7-4e6a-9206-d76110402f88 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.540672] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1053bada-1270-4864-9f9a-9185e5b886e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.546297] env[62875]: DEBUG oslo_concurrency.lockutils [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] Releasing lock 
"refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.546667] env[62875]: DEBUG nova.compute.manager [req-3e32a5ee-145d-408f-8220-c0a3a832b357 req-309758ca-195a-4e54-9f7b-ffcc20797dc1 service nova] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Received event network-vif-deleted-d3af8b79-1c53-42e6-a5b6-9d0429e0773f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1843.564412] env[62875]: DEBUG nova.network.neutron [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1843.726860] env[62875]: DEBUG nova.compute.manager [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Received event network-changed-454ce4f2-c67c-473c-a97b-86de05e81627 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1843.726860] env[62875]: DEBUG nova.compute.manager [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Refreshing instance network info cache due to event network-changed-454ce4f2-c67c-473c-a97b-86de05e81627. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1843.726860] env[62875]: DEBUG oslo_concurrency.lockutils [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] Acquiring lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.757119] env[62875]: ERROR nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. 
[ 1843.757119] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1843.757119] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1843.757119] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1843.757119] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1843.757119] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1843.757119] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1843.757119] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1843.757119] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1843.757119] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1843.757119] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1843.757119] env[62875]: ERROR nova.compute.manager raise self.value [ 1843.757119] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1843.757119] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1843.757119] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1843.757119] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1843.757694] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1843.757694] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1843.757694] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. 
[ 1843.757694] env[62875]: ERROR nova.compute.manager [ 1843.757694] env[62875]: Traceback (most recent call last): [ 1843.757694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1843.757694] env[62875]: listener.cb(fileno) [ 1843.757694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1843.757694] env[62875]: result = function(*args, **kwargs) [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1843.757694] env[62875]: return func(*args, **kwargs) [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1843.757694] env[62875]: raise e [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1843.757694] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1843.757694] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1843.757694] env[62875]: with excutils.save_and_reraise_exception(): [ 1843.757694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1843.757694] env[62875]: self.force_reraise() [ 1843.757694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1843.757694] env[62875]: raise self.value [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1843.757694] env[62875]: updated_port = self._update_port( [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1843.757694] env[62875]: _ensure_no_port_binding_failure(port) [ 1843.757694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1843.757694] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1843.758579] env[62875]: nova.exception.PortBindingFailed: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. [ 1843.758579] env[62875]: Removing descriptor: 19 [ 1843.758579] env[62875]: ERROR nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. 
[ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Traceback (most recent call last): [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] yield resources [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self.driver.spawn(context, instance, image_meta, [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1843.758579] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] vm_ref = self.build_virtual_machine(instance, [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] vif_infos = vmwarevif.get_vif_info(self._session, [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] for vif in network_info: [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return self._sync_wrapper(fn, *args, **kwargs) [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self.wait() [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self[:] = self._gt.wait() [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return self._exit_event.wait() [ 1843.759065] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] result = hub.switch() [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return self.greenlet.switch() [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] result = function(*args, **kwargs) [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return func(*args, **kwargs) [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] raise e [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] nwinfo = self.network_api.allocate_for_instance( [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1843.759431] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] created_port_ids = self._update_ports_for_instance( [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] with excutils.save_and_reraise_exception(): [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self.force_reraise() [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] raise self.value [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] updated_port = self._update_port( [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] _ensure_no_port_binding_failure(port) [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1843.759885] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] raise exception.PortBindingFailed(port_id=port['id']) [ 1843.760886] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] nova.exception.PortBindingFailed: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. [ 1843.760886] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] [ 1843.760886] env[62875]: INFO nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Terminating instance [ 1843.773780] env[62875]: DEBUG nova.compute.manager [None req-e096ce9b-eef9-4a75-9b09-50361c96b5d1 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Found 0 images (rotation: 2) {{(pid=62875) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1843.792150] env[62875]: DEBUG nova.network.neutron [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.875021] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.875021] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1843.876337] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.475s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.879180] env[62875]: INFO nova.compute.claims [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1843.917950] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Successfully created port: b1b24ba0-e81a-43d1-9354-6c09b85dd533 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1843.937204] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1843.947431] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Acquiring lock "4f036275-fd33-440d-acba-6e475cda62c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.948026] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Lock "4f036275-fd33-440d-acba-6e475cda62c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.073761] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.265608] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Acquiring lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.265608] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 
tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Acquired lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.265608] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1844.293483] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Releasing lock "refresh_cache-f27770de-40f5-4d5a-8819-a62d8f9a320a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.296882] env[62875]: DEBUG nova.compute.manager [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1844.296882] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1844.296882] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b503ae-923b-432a-b231-4f3be31fdbbf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.306555] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.306812] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78e4419e-6cec-4812-bc2b-a60a14790825 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.319367] env[62875]: DEBUG oslo_vmware.api [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){ [ 1844.319367] env[62875]: value = "task-2179936" [ 1844.319367] env[62875]: _type = "Task" [ 1844.319367] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.329747] env[62875]: DEBUG oslo_vmware.api [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179936, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.386257] env[62875]: DEBUG nova.compute.utils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1844.393953] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1844.394201] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1844.526963] env[62875]: DEBUG nova.policy [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd431874c833546edb5cb056050932e33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af2975efd73d4429a8a1e0cbbbbd9d78', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1844.581191] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Releasing lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.581191] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1844.581191] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1844.581191] env[62875]: DEBUG oslo_concurrency.lockutils [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] Acquired lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1844.581871] env[62875]: DEBUG nova.network.neutron [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Refreshing network info cache for port 454ce4f2-c67c-473c-a97b-86de05e81627 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1844.588872] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9af0a53f-4d24-45aa-b6f3-9fa05caecbd1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1844.606699] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c5e101-ba57-4697-82af-3913d55990a5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1844.642090] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f013c0a3-fb9c-480b-b479-e81c1b1e8234 could not be found.
[ 1844.642360] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1844.642633] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Took 0.06 seconds to destroy the instance on the hypervisor.
[ 1844.642854] env[62875]: DEBUG oslo.service.loopingcall [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1844.643149] env[62875]: DEBUG nova.compute.manager [-] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1844.643213] env[62875]: DEBUG nova.network.neutron [-] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1844.680711] env[62875]: DEBUG nova.network.neutron [-] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1844.812053] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1844.832588] env[62875]: DEBUG oslo_vmware.api [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179936, 'name': PowerOffVM_Task, 'duration_secs': 0.141779} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1844.833107] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1844.833401] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1844.833765] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4cd0d0d9-172e-4cc0-ab60-5c12f45baa4a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1844.869524] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1844.869524] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1844.869892] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Deleting the datastore file [datastore2] f27770de-40f5-4d5a-8819-a62d8f9a320a {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1844.870067] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d5f2165-cfb2-4003-985f-33249489a7d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1844.879263] env[62875]: DEBUG oslo_vmware.api [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for the task: (returnval){
[ 1844.879263] env[62875]: value = "task-2179938"
[ 1844.879263] env[62875]: _type = "Task"
[ 1844.879263] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1844.890825] env[62875]: DEBUG oslo_vmware.api [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179938, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1844.897833] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 1845.014531] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1845.115154] env[62875]: DEBUG nova.network.neutron [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1845.133417] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Acquiring lock "edf6724b-3a8c-4c19-926e-8f1b081ab50f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1845.133751] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Lock "edf6724b-3a8c-4c19-926e-8f1b081ab50f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1845.185375] env[62875]: DEBUG nova.network.neutron [-] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1845.248020] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099e1cb6-af09-4745-a47c-4f4c37b36127 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1845.257617] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df68ac5-4493-46f9-bca8-dc37cd2875a5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1845.293415] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bfd3d0-7edc-455f-9f8a-fd38b6418664 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1845.296700] env[62875]: DEBUG nova.network.neutron [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1845.306905] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cd4eca-5ec6-4adf-8939-26a41d2b537e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1845.318825] env[62875]: DEBUG nova.compute.provider_tree [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1845.392453] env[62875]: DEBUG oslo_vmware.api [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Task: {'id': task-2179938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106056} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1845.392453] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1845.392453] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1845.392453] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1845.392604] env[62875]: INFO nova.compute.manager [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Took 1.10 seconds to destroy the instance on the hypervisor.
[ 1845.393140] env[62875]: DEBUG oslo.service.loopingcall [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1845.393388] env[62875]: DEBUG nova.compute.manager [-] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1845.393487] env[62875]: DEBUG nova.network.neutron [-] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1845.519647] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Releasing lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1845.520233] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1845.520343] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1845.521033] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-239f3389-d247-4200-997c-a3e7527b6559 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1845.533825] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f00ce7-69f9-4518-90b6-b46174a018db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1845.549163] env[62875]: DEBUG nova.network.neutron [-] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1845.566988] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24f59fd1-63bf-4292-81f1-dc762510151f could not be found.
[ 1845.567424] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1845.567622] env[62875]: INFO nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1845.567866] env[62875]: DEBUG oslo.service.loopingcall [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1845.568117] env[62875]: DEBUG nova.compute.manager [-] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1845.568215] env[62875]: DEBUG nova.network.neutron [-] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1845.596408] env[62875]: DEBUG nova.network.neutron [-] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1845.689304] env[62875]: INFO nova.compute.manager [-] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Took 1.05 seconds to deallocate network for instance.
[ 1845.691665] env[62875]: DEBUG nova.compute.claims [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1845.691851] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1845.799657] env[62875]: DEBUG oslo_concurrency.lockutils [req-af4aaa95-f6c1-4301-a944-874a7ec13c2d req-232fa092-5729-4ac2-a111-8c4a013906e2 service nova] Releasing lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1845.824222] env[62875]: DEBUG nova.scheduler.client.report [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1845.873429] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Successfully created port: 005b6c7e-e1e5-474e-963d-bba5887c69db {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1845.909112] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 1845.939928] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 1845.940225] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1845.940379] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 1845.940744] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1845.940744] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 1845.940886] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 1845.941593] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 1845.941964] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 1845.942094] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 1845.942541] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 1845.942731] env[62875]: DEBUG nova.virt.hardware [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 1845.943747] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084f87da-41b5-4838-b60e-8d495e907a6c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1845.954072] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe842f6-7805-4b66-93e7-1dcf484b7c9e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1846.009842] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1846.010361] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1846.010496] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1846.010700] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1846.010928] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1846.011093] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1846.052166] env[62875]: DEBUG nova.network.neutron [-] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1846.099741] env[62875]: DEBUG nova.network.neutron [-] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1846.301813] env[62875]: ERROR nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information.
[ 1846.301813] env[62875]: ERROR nova.compute.manager Traceback (most recent call last):
[ 1846.301813] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1846.301813] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 1846.301813] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1846.301813] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 1846.301813] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1846.301813] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 1846.301813] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1846.301813] env[62875]: ERROR nova.compute.manager self.force_reraise()
[ 1846.301813] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1846.301813] env[62875]: ERROR nova.compute.manager raise self.value
[ 1846.301813] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1846.301813] env[62875]: ERROR nova.compute.manager updated_port = self._update_port(
[ 1846.301813] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1846.301813] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 1846.303715] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1846.303715] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 1846.303715] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information.
[ 1846.303715] env[62875]: ERROR nova.compute.manager
[ 1846.303715] env[62875]: Traceback (most recent call last):
[ 1846.303715] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 1846.303715] env[62875]: listener.cb(fileno)
[ 1846.303715] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1846.303715] env[62875]: result = function(*args, **kwargs)
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1846.303715] env[62875]: return func(*args, **kwargs)
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1846.303715] env[62875]: raise e
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1846.303715] env[62875]: nwinfo = self.network_api.allocate_for_instance(
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1846.303715] env[62875]: created_port_ids = self._update_ports_for_instance(
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1846.303715] env[62875]: with excutils.save_and_reraise_exception():
[ 1846.303715] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1846.303715] env[62875]: self.force_reraise()
[ 1846.303715] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1846.303715] env[62875]: raise self.value
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1846.303715] env[62875]: updated_port = self._update_port(
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1846.303715] env[62875]: _ensure_no_port_binding_failure(port)
[ 1846.303715] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1846.303715] env[62875]: raise exception.PortBindingFailed(port_id=port['id'])
[ 1846.304752] env[62875]: nova.exception.PortBindingFailed: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information.
[ 1846.304752] env[62875]: Removing descriptor: 21
[ 1846.304752] env[62875]: ERROR nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information.
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Traceback (most recent call last):
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] yield resources
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self.driver.spawn(context, instance, image_meta,
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1846.304752] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] vm_ref = self.build_virtual_machine(instance,
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] vif_infos = vmwarevif.get_vif_info(self._session,
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] for vif in network_info:
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return self._sync_wrapper(fn, *args, **kwargs)
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self.wait()
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self[:] = self._gt.wait()
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return self._exit_event.wait()
[ 1846.305100] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] result = hub.switch()
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return self.greenlet.switch()
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] result = function(*args, **kwargs)
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return func(*args, **kwargs)
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] raise e
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] nwinfo = self.network_api.allocate_for_instance(
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1846.305494] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] created_port_ids = self._update_ports_for_instance(
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] with excutils.save_and_reraise_exception():
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self.force_reraise()
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] raise self.value
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] updated_port = self._update_port(
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] _ensure_no_port_binding_failure(port)
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1846.305850] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] raise exception.PortBindingFailed(port_id=port['id'])
[ 1846.307444] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] nova.exception.PortBindingFailed: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information.
[ 1846.307444] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e]
[ 1846.307444] env[62875]: INFO nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Terminating instance
[ 1846.330312] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1846.334304] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 1846.334304] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.039s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1846.556586] env[62875]: INFO nova.compute.manager [-] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Took 1.16 seconds to deallocate network for instance.
[ 1846.572350] env[62875]: DEBUG nova.compute.manager [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Received event network-changed-e0a41d02-7676-444c-89da-b83506568fa2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1846.572531] env[62875]: DEBUG nova.compute.manager [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Refreshing instance network info cache due to event network-changed-e0a41d02-7676-444c-89da-b83506568fa2. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 1846.572762] env[62875]: DEBUG oslo_concurrency.lockutils [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] Acquiring lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1846.573476] env[62875]: DEBUG oslo_concurrency.lockutils [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] Acquired lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1846.573476] env[62875]: DEBUG nova.network.neutron [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Refreshing network info cache for port e0a41d02-7676-444c-89da-b83506568fa2 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1846.606058] env[62875]: INFO nova.compute.manager [-] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Took 1.04 seconds to deallocate network for instance.
[ 1846.611289] env[62875]: DEBUG nova.compute.claims [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1846.611289] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1846.812280] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Acquiring lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1846.812280] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Acquired lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1846.812456] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1846.842316] env[62875]: DEBUG nova.compute.utils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1846.848018] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1846.848018] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1847.066571] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1847.074969] env[62875]: DEBUG nova.policy [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07cb7bb328234b8a89bc06401d064c09', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d014563e5f84703abaa60d835d27024', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 1847.120383] env[62875]: DEBUG nova.network.neutron [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1847.157593] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c73664a-257e-46b8-b5dc-723592ff132a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1847.173846] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1027d5-0611-4e13-a99b-335b309615c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1847.206378] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36835b56-0370-46e5-99ef-971b2db51fec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1847.215647] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621809a1-71b4-4641-b944-65f66300e224 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1847.230802] env[62875]: DEBUG nova.compute.provider_tree [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1847.350578] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 1847.359218] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1847.380643] env[62875]: DEBUG nova.network.neutron [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1847.474018] env[62875]: DEBUG nova.compute.manager [req-525dced1-60a1-4261-b2ab-344896a6ce98 req-bfd02c4b-b914-4de0-ad76-a2c424004b6b service nova] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Received event network-vif-deleted-454ce4f2-c67c-473c-a97b-86de05e81627 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1847.581180] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1847.707090] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1847.734940] env[62875]: DEBUG nova.scheduler.client.report [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1847.886109] env[62875]: DEBUG oslo_concurrency.lockutils [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] Releasing lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1847.886466] env[62875]: DEBUG nova.compute.manager [req-c2c882ac-0fde-4ecf-8fab-dd4df266b03f req-6757fd77-8b64-493d-962c-0c26e86a1b92 service nova] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Received event network-vif-deleted-e0a41d02-7676-444c-89da-b83506568fa2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1848.087158] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Releasing lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1848.087158] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1848.087158] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1848.087158] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-528c7c10-8c48-448f-9764-fa1af7bdc90c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1848.096492] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02234c8-f442-4665-8052-5abcbe6a3989 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1848.128603] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ec33d0e1-4435-45c3-8ecf-33465cafda0e could not be found.
[ 1848.128977] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1848.129320] env[62875]: INFO nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1848.130458] env[62875]: DEBUG oslo.service.loopingcall [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1848.130458] env[62875]: DEBUG nova.compute.manager [-] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1848.130458] env[62875]: DEBUG nova.network.neutron [-] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1848.176747] env[62875]: DEBUG nova.network.neutron [-] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1848.220488] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Successfully created port: 09a14792-afd5-4d6d-8a3e-6b7346b95ecc {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1848.242313] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.908s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1848.242989] env[62875]: ERROR nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information.
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Traceback (most recent call last):
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self.driver.spawn(context, instance, image_meta,
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] vm_ref = self.build_virtual_machine(instance,
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] vif_infos = vmwarevif.get_vif_info(self._session,
[ 1848.242989] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] for vif in network_info:
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return self._sync_wrapper(fn, *args, **kwargs)
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self.wait()
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self[:] = self._gt.wait()
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return self._exit_event.wait()
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] result = hub.switch()
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1848.243334] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return self.greenlet.switch()
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] result = function(*args, **kwargs)
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] return func(*args, **kwargs)
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] raise e
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] nwinfo = self.network_api.allocate_for_instance(
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] created_port_ids = self._update_ports_for_instance(
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] with excutils.save_and_reraise_exception():
[ 1848.243668] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] self.force_reraise()
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] raise self.value
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] updated_port = self._update_port(
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] _ensure_no_port_binding_failure(port)
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] raise exception.PortBindingFailed(port_id=port['id'])
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] nova.exception.PortBindingFailed: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information.
[ 1848.244075] env[62875]: ERROR nova.compute.manager [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305]
[ 1848.244354] env[62875]: DEBUG nova.compute.utils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1848.246973] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.172s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1848.253245] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Build of instance f2891dfe-2464-4699-b4cd-54fa97cfb305 was re-scheduled: Binding failed for port e8ef20dc-49f8-4d9d-a15e-516fa03c4c7d, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}}
[ 1848.253726] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}}
[ 1848.253955] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Acquiring lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1848.254110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Acquired lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1848.254265] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1848.365242] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Start spawning the instance on the hypervisor.
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1848.398353] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1848.398598] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1848.398796] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1848.398935] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1848.399502] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1848.399901] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1848.400419] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1848.400672] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1848.401159] env[62875]: DEBUG nova.virt.hardware [None 
req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1848.401447] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1848.401787] env[62875]: DEBUG nova.virt.hardware [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1848.402884] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3726109-c154-437d-8c82-336192fef70b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.413220] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a4ac5d-2bd5-4bfc-a2af-a947cdcaba6e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.680197] env[62875]: DEBUG nova.network.neutron [-] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.706632] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1848.793818] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1849.117760] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b865585e-109b-4da9-9366-75111660f7d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.126849] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dbec1a-54bf-401e-920d-273e1cc7d05e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.132032] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.165271] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066a7560-4303-4fa3-b705-9efccd27627c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.175733] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaed77d-7a1b-430d-9d0d-44c5362fbde4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.184029] env[62875]: INFO nova.compute.manager [-] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Took 1.05 seconds to deallocate network for instance. [ 1849.194885] env[62875]: DEBUG nova.compute.provider_tree [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.197045] env[62875]: DEBUG nova.compute.claims [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1849.197045] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.211725] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.492384] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "820d7177-3e8f-4dd7-b7c3-c7abd4a62158" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.492887] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "820d7177-3e8f-4dd7-b7c3-c7abd4a62158" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.495279] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Acquiring lock "2896c309-a702-498b-8b44-f01620b597e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.495343] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Lock "2896c309-a702-498b-8b44-f01620b597e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.638044] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Releasing lock "refresh_cache-f2891dfe-2464-4699-b4cd-54fa97cfb305" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1849.638044] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1849.638044] env[62875]: DEBUG nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1849.638044] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1849.671036] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1849.698954] env[62875]: DEBUG nova.scheduler.client.report [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1849.830755] env[62875]: ERROR nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. [ 1849.830755] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1849.830755] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1849.830755] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1849.830755] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1849.830755] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1849.830755] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1849.830755] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1849.830755] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1849.830755] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1849.830755] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1849.830755] env[62875]: ERROR nova.compute.manager raise self.value [ 1849.830755] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1849.830755] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1849.830755] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1849.830755] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1849.831365] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1849.831365] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1849.831365] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. 
[ 1849.831365] env[62875]: ERROR nova.compute.manager [ 1849.831365] env[62875]: Traceback (most recent call last): [ 1849.831365] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1849.831365] env[62875]: listener.cb(fileno) [ 1849.831365] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1849.831365] env[62875]: result = function(*args, **kwargs) [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1849.831365] env[62875]: return func(*args, **kwargs) [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1849.831365] env[62875]: raise e [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1849.831365] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1849.831365] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1849.831365] env[62875]: with excutils.save_and_reraise_exception(): [ 1849.831365] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1849.831365] env[62875]: self.force_reraise() [ 1849.831365] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1849.831365] env[62875]: raise self.value [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1849.831365] env[62875]: updated_port = self._update_port( [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1849.831365] env[62875]: _ensure_no_port_binding_failure(port) [ 1849.831365] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1849.831365] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1849.832205] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. [ 1849.832205] env[62875]: Removing descriptor: 16 [ 1849.832205] env[62875]: ERROR nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. 
[ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] Traceback (most recent call last): [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] yield resources [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self.driver.spawn(context, instance, image_meta, [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1849.832205] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] vm_ref = self.build_virtual_machine(instance, [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] for vif in network_info: [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return self._sync_wrapper(fn, *args, **kwargs) [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self.wait() [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self[:] = self._gt.wait() [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return self._exit_event.wait() [ 1849.832560] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] result = hub.switch() [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return self.greenlet.switch() [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] result = function(*args, **kwargs) [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return func(*args, **kwargs) [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] raise e [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] nwinfo = self.network_api.allocate_for_instance( [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1849.832900] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] created_port_ids = self._update_ports_for_instance( [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] with excutils.save_and_reraise_exception(): [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self.force_reraise() [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] raise self.value [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] updated_port = self._update_port( [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] _ensure_no_port_binding_failure(port) [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1849.833245] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] raise exception.PortBindingFailed(port_id=port['id']) [ 1849.833661] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] nova.exception.PortBindingFailed: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. [ 1849.833661] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] [ 1849.833661] env[62875]: INFO nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Terminating instance [ 1850.174013] env[62875]: DEBUG nova.network.neutron [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.204260] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.957s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.204958] env[62875]: ERROR nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. 
[ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Traceback (most recent call last): [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self.driver.spawn(context, instance, image_meta, [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] vm_ref = self.build_virtual_machine(instance, [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] vif_infos = vmwarevif.get_vif_info(self._session, [ 1850.204958] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] for vif in network_info: [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return self._sync_wrapper(fn, *args, **kwargs) [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self.wait() [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self[:] = self._gt.wait() [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return self._exit_event.wait() [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] result = hub.switch() [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1850.206643] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return self.greenlet.switch() [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] result = function(*args, **kwargs) [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] return func(*args, **kwargs) [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] raise e [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] nwinfo = self.network_api.allocate_for_instance( [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] created_port_ids = self._update_ports_for_instance( [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] with excutils.save_and_reraise_exception(): [ 1850.208139] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] self.force_reraise() [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] raise self.value [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] updated_port = self._update_port( [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] _ensure_no_port_binding_failure(port) [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] raise exception.PortBindingFailed(port_id=port['id']) [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] nova.exception.PortBindingFailed: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. [ 1850.208561] env[62875]: ERROR nova.compute.manager [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] [ 1850.208941] env[62875]: DEBUG nova.compute.utils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1850.208941] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.757s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.208941] env[62875]: INFO nova.compute.claims [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1850.211862] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Build of instance 048df8ed-3be9-430f-8ade-c3cabcb0f16c was re-scheduled: Binding failed for port c38cd795-ea9d-4e01-a734-b80e9a180e5a, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1850.212422] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1850.212650] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.212798] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquired lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.212956] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1850.339191] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Acquiring lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.339387] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Acquired lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.339569] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1850.683883] env[62875]: INFO nova.compute.manager [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] [instance: f2891dfe-2464-4699-b4cd-54fa97cfb305] Took 1.05 seconds to deallocate network for instance. 
[ 1850.716693] env[62875]: DEBUG nova.compute.manager [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Received event network-changed-b1b24ba0-e81a-43d1-9354-6c09b85dd533 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1850.716909] env[62875]: DEBUG nova.compute.manager [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Refreshing instance network info cache due to event network-changed-b1b24ba0-e81a-43d1-9354-6c09b85dd533. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1850.717160] env[62875]: DEBUG oslo_concurrency.lockutils [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] Acquiring lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.717283] env[62875]: DEBUG oslo_concurrency.lockutils [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] Acquired lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.717478] env[62875]: DEBUG nova.network.neutron [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Refreshing network info cache for port b1b24ba0-e81a-43d1-9354-6c09b85dd533 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1850.749054] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.903840] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.964370] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.001345] env[62875]: DEBUG nova.compute.manager [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Received event network-changed-005b6c7e-e1e5-474e-963d-bba5887c69db {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1851.001536] env[62875]: DEBUG nova.compute.manager [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Refreshing instance network info cache due to event network-changed-005b6c7e-e1e5-474e-963d-bba5887c69db. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1851.001723] env[62875]: DEBUG oslo_concurrency.lockutils [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] Acquiring lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.024535] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.195095] env[62875]: ERROR nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. 
[ 1851.195095] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1851.195095] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1851.195095] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1851.195095] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1851.195095] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1851.195095] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1851.195095] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1851.195095] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1851.195095] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1851.195095] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1851.195095] env[62875]: ERROR nova.compute.manager raise self.value [ 1851.195095] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1851.195095] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1851.195095] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1851.195095] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1851.197422] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1851.197422] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1851.197422] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. 
[ 1851.197422] env[62875]: ERROR nova.compute.manager [ 1851.197422] env[62875]: Traceback (most recent call last): [ 1851.197422] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1851.197422] env[62875]: listener.cb(fileno) [ 1851.197422] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1851.197422] env[62875]: result = function(*args, **kwargs) [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1851.197422] env[62875]: return func(*args, **kwargs) [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1851.197422] env[62875]: raise e [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1851.197422] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1851.197422] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1851.197422] env[62875]: with excutils.save_and_reraise_exception(): [ 1851.197422] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1851.197422] env[62875]: self.force_reraise() [ 1851.197422] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1851.197422] env[62875]: raise self.value [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1851.197422] env[62875]: updated_port = self._update_port( [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1851.197422] env[62875]: _ensure_no_port_binding_failure(port) [ 1851.197422] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1851.197422] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1851.198324] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. [ 1851.198324] env[62875]: Removing descriptor: 21 [ 1851.198324] env[62875]: ERROR nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. 
[ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Traceback (most recent call last): [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] yield resources [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self.driver.spawn(context, instance, image_meta, [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1851.198324] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] vm_ref = self.build_virtual_machine(instance, [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] vif_infos = vmwarevif.get_vif_info(self._session, [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] for vif in network_info: [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return self._sync_wrapper(fn, *args, **kwargs) [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self.wait() [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self[:] = self._gt.wait() [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return self._exit_event.wait() [ 1851.198710] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] result = hub.switch() [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return self.greenlet.switch() [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] result = function(*args, **kwargs) [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return func(*args, **kwargs) [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] raise e [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] nwinfo = self.network_api.allocate_for_instance( [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1851.199092] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] created_port_ids = self._update_ports_for_instance( [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] with excutils.save_and_reraise_exception(): [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self.force_reraise() [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] raise self.value [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] updated_port = self._update_port( [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] _ensure_no_port_binding_failure(port) [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1851.199471] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] raise exception.PortBindingFailed(port_id=port['id']) [ 1851.199893] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] nova.exception.PortBindingFailed: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. [ 1851.199893] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] [ 1851.199893] env[62875]: INFO nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Terminating instance [ 1851.268235] env[62875]: DEBUG nova.network.neutron [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1851.335945] env[62875]: DEBUG nova.network.neutron [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.352163] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Acquiring lock "74a1c3db-26b1-426e-be47-621c79ef9029" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.352728] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Lock "74a1c3db-26b1-426e-be47-621c79ef9029" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.473551] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Releasing lock "refresh_cache-048df8ed-3be9-430f-8ade-c3cabcb0f16c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.473551] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1851.473551] env[62875]: DEBUG nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1851.473707] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1851.496193] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1851.527860] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Releasing lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.527860] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1851.527860] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.528048] env[62875]: DEBUG oslo_concurrency.lockutils [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] Acquired lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.528901] env[62875]: DEBUG nova.network.neutron [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Refreshing network info cache for port 005b6c7e-e1e5-474e-963d-bba5887c69db {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1851.529506] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b46a3273-571b-439a-82a6-6c5d0d1ccfe0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.542432] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae7b63b-6b65-4ad6-b045-8264cb7390e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.576307] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d77191c5-8593-4730-8612-4877e059e7dc could not be found. [ 1851.576730] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1851.576971] env[62875]: INFO nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1851.577258] env[62875]: DEBUG oslo.service.loopingcall [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1851.580412] env[62875]: DEBUG nova.compute.manager [-] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1851.580532] env[62875]: DEBUG nova.network.neutron [-] [instance: d77191c5-8593-4730-8612-4877e059e7dc] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1851.615215] env[62875]: DEBUG nova.network.neutron [-] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1851.640339] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fd62f1-3bf0-4148-b5cb-81302d4f5222 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.647929] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b81d4e7-9527-47ca-b9b2-d7f7f4a59e9c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.682074] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6df872-2d27-4b68-9c7f-721bffa17671 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.692172] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27262562-4a9f-425b-b074-bc7bef01ccc0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.711565] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Acquiring lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.711638] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Acquired lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.711785] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1851.713119] env[62875]: DEBUG nova.compute.provider_tree [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1851.735666] env[62875]: INFO nova.scheduler.client.report [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 
tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Deleted allocations for instance f2891dfe-2464-4699-b4cd-54fa97cfb305 [ 1851.838493] env[62875]: DEBUG oslo_concurrency.lockutils [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] Releasing lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.838629] env[62875]: DEBUG nova.compute.manager [req-8c86b2c6-e925-4dec-8830-24aaf962a71b req-05cb84a8-8f4d-48c0-80cc-6e45f0ca0834 service nova] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Received event network-vif-deleted-b1b24ba0-e81a-43d1-9354-6c09b85dd533 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1852.002460] env[62875]: DEBUG nova.network.neutron [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.076800] env[62875]: DEBUG nova.network.neutron [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1852.120223] env[62875]: DEBUG nova.network.neutron [-] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.220637] env[62875]: DEBUG nova.scheduler.client.report [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1852.247396] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41f65d06-df19-4693-b965-95f5f2be0c49 tempest-InstanceActionsNegativeTestJSON-1641731192 tempest-InstanceActionsNegativeTestJSON-1641731192-project-member] Lock "f2891dfe-2464-4699-b4cd-54fa97cfb305" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 29.850s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.247396] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1852.294223] env[62875]: DEBUG nova.network.neutron [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.333113] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.508340] env[62875]: INFO nova.compute.manager [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 048df8ed-3be9-430f-8ade-c3cabcb0f16c] Took 1.03 seconds to deallocate network for instance. [ 1852.623653] env[62875]: INFO nova.compute.manager [-] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Took 1.04 seconds to deallocate network for instance. [ 1852.627206] env[62875]: DEBUG nova.compute.claims [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1852.627206] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.726410] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.727476] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1852.731542] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.712s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.751093] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1852.799053] env[62875]: DEBUG oslo_concurrency.lockutils [req-0aae45ba-24dd-4495-a529-03f3eddc2999 req-b669fb27-3036-4655-964c-54ff939d68d9 service nova] Releasing lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.835525] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Releasing lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.838457] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1852.838457] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1852.838457] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0dfb42cd-c2a2-43ba-9912-cc24be663fca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.849310] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b568ddc9-0c26-4380-b15e-4b065ac44364 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.879336] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c7f0f27e-5bb6-4306-ab9f-282578d1cfce could not be found. [ 1852.879655] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1852.879893] env[62875]: INFO nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1852.880377] env[62875]: DEBUG oslo.service.loopingcall [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.880665] env[62875]: DEBUG nova.compute.manager [-] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1852.880830] env[62875]: DEBUG nova.network.neutron [-] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1852.898940] env[62875]: DEBUG nova.network.neutron [-] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1853.241879] env[62875]: DEBUG nova.compute.utils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1853.249772] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1853.249772] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1853.284841] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.405981] env[62875]: DEBUG nova.network.neutron [-] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.468048] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Acquiring lock "ec4405a5-de44-4749-9225-3945db05ca6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.468379] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Lock "ec4405a5-de44-4749-9225-3945db05ca6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.478372] env[62875]: DEBUG nova.policy [None req-bfde839a-b7c5-4262-a363-8eb4091573b1
tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c17bfe9ca8d44c34b335e18c7aa3a583', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '816bd85d289047e38d3a7f169e61b5e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1853.566655] env[62875]: INFO nova.scheduler.client.report [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Deleted allocations for instance 048df8ed-3be9-430f-8ade-c3cabcb0f16c [ 1853.641929] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739206b4-dfd9-4a44-b217-a2943485ca86 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.654297] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdbe7ef-5fba-43cf-a561-00156cd026a6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.694371] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51a7b30-8747-420f-89d5-24adca10d085 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.703270] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea71695-f4c2-45e8-979e-3a8757c86087 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.718850] env[62875]: DEBUG nova.compute.provider_tree [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.750503] env[62875]: DEBUG nova.compute.manager [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Received event network-changed-09a14792-afd5-4d6d-8a3e-6b7346b95ecc {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1853.751087] env[62875]: DEBUG nova.compute.manager [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Refreshing instance network info cache due to event network-changed-09a14792-afd5-4d6d-8a3e-6b7346b95ecc. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1853.751087] env[62875]: DEBUG oslo_concurrency.lockutils [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] Acquiring lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.751721] env[62875]: DEBUG oslo_concurrency.lockutils [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] Acquired lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.752104] env[62875]: DEBUG nova.network.neutron [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Refreshing network info cache for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1853.753380] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1853.909264] env[62875]: INFO nova.compute.manager [-] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Took 1.03 seconds to deallocate network for instance. [ 1853.913129] env[62875]: DEBUG nova.compute.claims [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1853.913316] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.074470] env[62875]: DEBUG oslo_concurrency.lockutils [None req-64fea25e-0f4f-416e-a9f9-1a8fec513cc2 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "048df8ed-3be9-430f-8ade-c3cabcb0f16c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 31.515s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.222805] env[62875]: DEBUG nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875)
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1854.272541] env[62875]: DEBUG nova.compute.manager [req-4587428b-97b0-46a9-b049-3b2e7f35f371 req-60a22cb6-dac1-40db-b023-6a4885c74c51 service nova] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Received event network-vif-deleted-005b6c7e-e1e5-474e-963d-bba5887c69db {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1854.306123] env[62875]: DEBUG nova.network.neutron [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1854.431235] env[62875]: DEBUG nova.network.neutron [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.578405] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1854.735611] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.741126] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. 
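Note: as with the c7f0f27e failure above, the traceback below begins in eventlet's hub rather than in the spawn path. _allocate_network_async runs in a separate greenthread, and the PortBindingFailed only surfaces when get_vif_info iterates network_info and _sync_wrapper calls wait() on that thread. A self-contained sketch of the deferred-exception pattern (the function and message below are invented for illustration):

    import eventlet

    def allocate():
        # Stands in for _allocate_network_async failing in the worker thread.
        raise RuntimeError('binding failed')

    gt = eventlet.spawn(allocate)
    # ... the build path keeps running until it first needs the result ...
    try:
        gt.wait()  # the worker's exception is re-raised here, not at spawn()
    except RuntimeError as exc:
        print('surfaced later: %s' % exc)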
[ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Traceback (most recent call last): [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self.driver.spawn(context, instance, image_meta, [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] vm_ref = self.build_virtual_machine(instance, [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] vif_infos = vmwarevif.get_vif_info(self._session, [ 1854.741126] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] for vif in network_info: [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return self._sync_wrapper(fn, *args, **kwargs) [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self.wait() [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self[:] = self._gt.wait() [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return self._exit_event.wait() [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] result = hub.switch() [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1854.741473] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return self.greenlet.switch() [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] result = function(*args, **kwargs) [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] return func(*args, **kwargs) [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] raise e [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] nwinfo = self.network_api.allocate_for_instance( [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] created_port_ids = self._update_ports_for_instance( [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] with excutils.save_and_reraise_exception(): [ 1854.741839] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] self.force_reraise() [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] raise self.value [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] updated_port = self._update_port( [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] _ensure_no_port_binding_failure(port) [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] raise exception.PortBindingFailed(port_id=port['id']) [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] nova.exception.PortBindingFailed: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. [ 1854.742241] env[62875]: ERROR nova.compute.manager [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] [ 1854.742641] env[62875]: DEBUG nova.compute.utils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1854.746537] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.304s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.749719] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Build of instance 15f747f0-43c0-4580-ab1c-28eadade4b82 was re-scheduled: Binding failed for port 73ed3ab8-2695-4597-9f6d-15d35a9ebc69, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1854.750259] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1854.750636] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.750636] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquired lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.750910] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1854.774497] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1854.814584] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1854.814914] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1854.816162] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1854.816162] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1854.816345] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1854.816504] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1854.816616] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1854.816774] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1854.816936] env[62875]: DEBUG nova.virt.hardware [None 
req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1854.817144] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1854.817372] env[62875]: DEBUG nova.virt.hardware [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1854.818313] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f28fe29-6d62-4930-8f5d-81b61d2fa824 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.834728] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c8b0fe-d999-4206-838f-b3342658ba47 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.905145] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "7d241bf8-6f7a-467f-9640-a9819d5cca72" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.905343] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "7d241bf8-6f7a-467f-9640-a9819d5cca72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.933877] env[62875]: DEBUG oslo_concurrency.lockutils [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] Releasing lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.933877] env[62875]: DEBUG nova.compute.manager [req-35b2cc43-115c-4bca-95d3-2f0ea919a011 req-1b15b203-06bd-4452-a47f-7a5d7ab61ca9 service nova] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Received event network-vif-deleted-09a14792-afd5-4d6d-8a3e-6b7346b95ecc {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1855.116437] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.302030] env[62875]: DEBUG
nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1855.489896] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Successfully created port: 956de09f-0d66-4f05-baad-cd8e38ba632b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1855.601526] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226f2942-7c94-4aca-947a-d0f9f7fe0a07 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.612390] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f486aacb-71d4-4d86-9880-dd77cfe14c9f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.654380] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.656334] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435c4a51-1741-43cb-8b8c-bee3c4cc186d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.669545] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4d647b-b087-4272-acf0-805daba3e4e9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.686414] env[62875]: DEBUG nova.compute.provider_tree [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1856.162426] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Releasing lock "refresh_cache-15f747f0-43c0-4580-ab1c-28eadade4b82" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.162689] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1856.162886] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1856.163036] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1856.194470] env[62875]: DEBUG nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1856.312064] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1856.702583] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.957s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.703310] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. 
[ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Traceback (most recent call last): [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self.driver.spawn(context, instance, image_meta, [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] vm_ref = self.build_virtual_machine(instance, [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 1856.703310] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] for vif in network_info: [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return self._sync_wrapper(fn, *args, **kwargs) [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self.wait() [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self[:] = self._gt.wait() [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return self._exit_event.wait() [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] result = hub.switch() [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1856.703634] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return self.greenlet.switch() [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] result = function(*args, **kwargs) [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] return func(*args, **kwargs) [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] raise e [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] nwinfo = self.network_api.allocate_for_instance( [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] created_port_ids = self._update_ports_for_instance( [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] with excutils.save_and_reraise_exception(): [ 1856.704363] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] self.force_reraise() [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] raise self.value [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] updated_port = self._update_port( [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] _ensure_no_port_binding_failure(port) [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] raise exception.PortBindingFailed(port_id=port['id']) [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] nova.exception.PortBindingFailed: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. [ 1856.704713] env[62875]: ERROR nova.compute.manager [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] [ 1856.706315] env[62875]: DEBUG nova.compute.utils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1856.706315] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.013s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.709603] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Build of instance e41036b6-5ff4-4263-b319-9627b176b2dc was re-scheduled: Binding failed for port d3af8b79-1c53-42e6-a5b6-9d0429e0773f, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1856.710114] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1856.710379] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.710562] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquired lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.710759] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1856.817138] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.269306] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1857.321947] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: 15f747f0-43c0-4580-ab1c-28eadade4b82] Took 1.16 seconds to deallocate network for instance. 
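[editor's note] Every failed build in this section dies the same way: Neutron accepts the port create/update but hands the port back with binding:vif_type set to 'binding_failed', and Nova's _update_port() then calls _ensure_no_port_binding_failure() (nova/network/neutron.py:294 in the frames above), which raises PortBindingFailed; the compute manager aborts the resource claim and re-schedules the instance, which is exactly the Lock "compute_resources" / "was re-scheduled" sequence visible here. Below is a minimal sketch of that guard, reconstructed from the frames in this log rather than quoted from the Nova source; the constant name follows nova.network.model, and the exception message mirrors the log text.

    # Sketch of the guard at nova/network/neutron.py:294 (reconstruction,
    # not verbatim Nova source). A Neutron port whose binding failed is
    # returned with binding:vif_type == 'binding_failed'; Nova converts
    # that into PortBindingFailed so the build can be aborted and
    # re-scheduled instead of spawning a VM with a dead VIF.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: this port dict is what a failed binding looks like from
    # Nova's side, and the call raises PortBindingFailed.
    #
    #   _ensure_no_port_binding_failure(
    #       {'id': 'd3af8b79-1c53-42e6-a5b6-9d0429e0773f',
    #        'binding:vif_type': 'binding_failed'})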
[ 1857.570514] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc02d08-0eda-46da-97f4-b883428673b7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.581992] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0482a48-978f-48a1-86e3-6ba5efd4334d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.621288] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d4bd32-48ce-42d5-a93c-3b8ca2b52ca5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.627454] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff62a689-4175-4733-9ca7-d3824a8f3c32 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.642737] env[62875]: DEBUG nova.compute.provider_tree [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1857.717202] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.965119] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "793da91d-461a-465b-b9a3-c5fa0f5b877d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.965547] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "793da91d-461a-465b-b9a3-c5fa0f5b877d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.148058] env[62875]: DEBUG nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1858.220130] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Releasing lock "refresh_cache-e41036b6-5ff4-4263-b319-9627b176b2dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.220379] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1858.220717] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1858.220794] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1858.285593] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1858.369852] env[62875]: INFO nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Deleted allocations for instance 15f747f0-43c0-4580-ab1c-28eadade4b82 [ 1858.656508] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.951s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.657419] env[62875]: ERROR nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. 
[ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Traceback (most recent call last): [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self.driver.spawn(context, instance, image_meta, [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] vm_ref = self.build_virtual_machine(instance, [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] vif_infos = vmwarevif.get_vif_info(self._session, [ 1858.657419] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] for vif in network_info: [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return self._sync_wrapper(fn, *args, **kwargs) [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self.wait() [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self[:] = self._gt.wait() [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return self._exit_event.wait() [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] result = hub.switch() [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1858.657814] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return self.greenlet.switch() [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] result = function(*args, **kwargs) [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] return func(*args, **kwargs) [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] raise e [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] nwinfo = self.network_api.allocate_for_instance( [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] created_port_ids = self._update_ports_for_instance( [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] with excutils.save_and_reraise_exception(): [ 1858.658192] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] self.force_reraise() [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] raise self.value [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] updated_port = self._update_port( [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] _ensure_no_port_binding_failure(port) [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] raise exception.PortBindingFailed(port_id=port['id']) [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] nova.exception.PortBindingFailed: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. [ 1858.658523] env[62875]: ERROR nova.compute.manager [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] [ 1858.658819] env[62875]: DEBUG nova.compute.utils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1858.663694] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Build of instance f013c0a3-fb9c-480b-b479-e81c1b1e8234 was re-scheduled: Binding failed for port 454ce4f2-c67c-473c-a97b-86de05e81627, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1858.663694] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1858.663694] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquiring lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.663694] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Acquired lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.663956] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1858.663956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.053s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.788742] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.878445] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "15f747f0-43c0-4580-ab1c-28eadade4b82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.987s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.023955] env[62875]: DEBUG nova.compute.manager [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Received event network-changed-956de09f-0d66-4f05-baad-cd8e38ba632b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1859.024176] env[62875]: DEBUG nova.compute.manager [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Refreshing instance network info cache due to event network-changed-956de09f-0d66-4f05-baad-cd8e38ba632b. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1859.024390] env[62875]: DEBUG oslo_concurrency.lockutils [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] Acquiring lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.024532] env[62875]: DEBUG oslo_concurrency.lockutils [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] Acquired lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.024728] env[62875]: DEBUG nova.network.neutron [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Refreshing network info cache for port 956de09f-0d66-4f05-baad-cd8e38ba632b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1859.216435] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1859.292403] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: e41036b6-5ff4-4263-b319-9627b176b2dc] Took 1.07 seconds to deallocate network for instance. 
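[editor's note] The oslo_utils.excutils frames in each traceback (__exit__ at excutils.py:227 calling self.force_reraise(), which does raise self.value at line 200) come from the save_and_reraise_exception context manager. Nova enters it from the except handler at neutron.py:1414 so it can tear down the ports it already created and then re-raise the original PortBindingFailed unchanged, which is why the re-raised exception's own frames (neutron.py:1389 and below) appear after the excutils frames. A minimal usage sketch, assuming oslo.utils is installed; _cleanup_created_ports() is a hypothetical stand-in for Nova's actual teardown, and RuntimeError stands in for PortBindingFailed:

    from oslo_utils import excutils

    def _cleanup_created_ports():
        # Hypothetical stand-in for deleting the ports Nova created
        # before the binding failure was detected.
        print('cleaning up ports')

    def update_ports():
        try:
            raise RuntimeError('Binding failed for port ...')
        except RuntimeError:
            # __enter__ captures the active exception via sys.exc_info();
            # the with-body runs the cleanup; __exit__ then calls
            # force_reraise(), re-raising the captured exception
            # unchanged (excutils.py:227/200 in the frames above).
            with excutils.save_and_reraise_exception():
                _cleanup_created_ports()

    # update_ports() prints 'cleaning up ports', then the original
    # RuntimeError propagates to the caller with its traceback intact.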
[ 1859.361024] env[62875]: ERROR nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. [ 1859.361024] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1859.361024] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1859.361024] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1859.361024] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1859.361024] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1859.361024] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1859.361024] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1859.361024] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1859.361024] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1859.361024] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1859.361024] env[62875]: ERROR nova.compute.manager raise self.value [ 1859.361024] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1859.361024] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1859.361024] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1859.361024] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1859.361694] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1859.361694] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1859.361694] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. 
[ 1859.361694] env[62875]: ERROR nova.compute.manager [ 1859.361694] env[62875]: Traceback (most recent call last): [ 1859.361694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1859.361694] env[62875]: listener.cb(fileno) [ 1859.361694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1859.361694] env[62875]: result = function(*args, **kwargs) [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1859.361694] env[62875]: return func(*args, **kwargs) [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1859.361694] env[62875]: raise e [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1859.361694] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1859.361694] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1859.361694] env[62875]: with excutils.save_and_reraise_exception(): [ 1859.361694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1859.361694] env[62875]: self.force_reraise() [ 1859.361694] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1859.361694] env[62875]: raise self.value [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1859.361694] env[62875]: updated_port = self._update_port( [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1859.361694] env[62875]: _ensure_no_port_binding_failure(port) [ 1859.361694] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1859.361694] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1859.362513] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. [ 1859.362513] env[62875]: Removing descriptor: 18 [ 1859.362513] env[62875]: ERROR nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. 
[ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Traceback (most recent call last): [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] yield resources [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self.driver.spawn(context, instance, image_meta, [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1859.362513] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] vm_ref = self.build_virtual_machine(instance, [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] vif_infos = vmwarevif.get_vif_info(self._session, [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] for vif in network_info: [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return self._sync_wrapper(fn, *args, **kwargs) [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self.wait() [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self[:] = self._gt.wait() [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return self._exit_event.wait() [ 1859.362917] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] result = hub.switch() [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return self.greenlet.switch() [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] result = function(*args, **kwargs) [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return func(*args, **kwargs) [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] raise e [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] nwinfo = self.network_api.allocate_for_instance( [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1859.363281] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] created_port_ids = self._update_ports_for_instance( [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] with excutils.save_and_reraise_exception(): [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self.force_reraise() [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] raise self.value [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] updated_port = self._update_port( [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] _ensure_no_port_binding_failure(port) [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1859.363638] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] raise exception.PortBindingFailed(port_id=port['id']) [ 1859.364019] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] nova.exception.PortBindingFailed: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. [ 1859.364019] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] [ 1859.364019] env[62875]: INFO nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Terminating instance [ 1859.381353] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1859.504233] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.548708] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f8ce2c-6eee-4af0-a641-cab1d1d52a26 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.558244] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff35843-fe30-499b-b470-4897c4b3689e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.600908] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78918ae-9b52-486b-8fd7-097b799df985 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.604967] env[62875]: DEBUG nova.network.neutron [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1859.612121] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02d0dd0-74f3-4a03-97d1-05494ff6edfb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.626500] env[62875]: DEBUG nova.compute.provider_tree [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.868951] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.913144] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.958179] env[62875]: DEBUG nova.network.neutron [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.011664] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Releasing lock "refresh_cache-f013c0a3-fb9c-480b-b479-e81c1b1e8234" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.011968] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1860.012095] env[62875]: DEBUG nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1860.012292] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1860.064371] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1860.131358] env[62875]: DEBUG nova.scheduler.client.report [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1860.347950] env[62875]: INFO nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Deleted allocations for instance e41036b6-5ff4-4263-b319-9627b176b2dc [ 1860.384055] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "4ee68d78-b265-4ee8-afcc-ce6ed150fb73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.384806] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "4ee68d78-b265-4ee8-afcc-ce6ed150fb73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.462011] env[62875]: DEBUG oslo_concurrency.lockutils [req-0f180ba6-3053-4ee0-a18c-4b31f2945568 req-331f1a28-a7d7-4302-a182-5fb65baa6384 service nova] Releasing lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1860.462011] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquired lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.462011] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1860.569954] env[62875]: DEBUG nova.network.neutron [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.640723] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.976s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.641118] env[62875]: ERROR nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. 
[ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Traceback (most recent call last): [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self.driver.spawn(context, instance, image_meta, [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] vm_ref = self.build_virtual_machine(instance, [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] vif_infos = vmwarevif.get_vif_info(self._session, [ 1860.641118] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] for vif in network_info: [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return self._sync_wrapper(fn, *args, **kwargs) [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self.wait() [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self[:] = self._gt.wait() [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return self._exit_event.wait() [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] result = hub.switch() [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1860.641674] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return self.greenlet.switch() [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] result = function(*args, **kwargs) [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] return func(*args, **kwargs) [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] raise e [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] nwinfo = self.network_api.allocate_for_instance( [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] created_port_ids = self._update_ports_for_instance( [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] with excutils.save_and_reraise_exception(): [ 1860.642504] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] self.force_reraise() [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] raise self.value [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] updated_port = self._update_port( [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] _ensure_no_port_binding_failure(port) [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] raise exception.PortBindingFailed(port_id=port['id']) [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] nova.exception.PortBindingFailed: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. [ 1860.643344] env[62875]: ERROR nova.compute.manager [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] [ 1860.644551] env[62875]: DEBUG nova.compute.utils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1860.645381] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.579s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.645611] env[62875]: DEBUG nova.objects.instance [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lazy-loading 'resources' on Instance uuid f27770de-40f5-4d5a-8819-a62d8f9a320a {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.653345] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Build of instance 24f59fd1-63bf-4292-81f1-dc762510151f was re-scheduled: Binding failed for port e0a41d02-7676-444c-89da-b83506568fa2, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1860.653345] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1860.653345] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Acquiring lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.653345] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Acquired lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.653542] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1860.867194] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "e41036b6-5ff4-4263-b319-9627b176b2dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.917s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.998578] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1861.073889] env[62875]: INFO nova.compute.manager [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] [instance: f013c0a3-fb9c-480b-b479-e81c1b1e8234] Took 1.06 seconds to deallocate network for instance. 
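
The lock lifecycle traced throughout this section comes from two oslo.concurrency entry points: the plain context manager (the "Acquiring"/"Acquired"/"Releasing" lines at lockutils.py:310/313/331, used for the per-instance refresh_cache locks) and the synchronized decorator, whose 'inner' wrapper emits the 'acquired by ... waited Ns' / '"released" ... held Ns' lines at lockutils.py:402/407/421. A minimal sketch of both forms, using lock names taken from the log; Nova reaches these through its own wrappers, so this is illustrative, not Nova's code, and rebuild_cache()/claim_resources() are hypothetical stand-ins for the guarded work:

```python
from oslo_concurrency import lockutils

def rebuild_cache():
    pass  # stand-in for rebuilding an instance's network info cache

# Context-manager form: emits the Acquiring/Acquired/Releasing DEBUG
# lines (lockutils.py:310/313/331) seen for the refresh_cache locks.
with lockutils.lock("refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a"):
    rebuild_cache()

# Decorator form: its 'inner' wrapper emits the 'acquired by ... waited'
# and '"released" ... held' lines (lockutils.py:402/407/421).
@lockutils.synchronized("compute_resources")
def claim_resources():
    pass  # stand-in for e.g. ResourceTracker.instance_claim
```

The waited/held durations in the log bracket exactly these scopes, which is why long "held" times on "compute_resources" (1.976s above) show up as long "waited" times for the next claimant.
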
[ 1861.192064] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.253960] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1861.378841] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1861.546231] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Acquiring lock "c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.546755] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Lock "c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.553864] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5324ff7-36de-4b38-872d-25e397c9022d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.564806] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49852818-f257-41ca-bbb2-fe8102ad590b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.604198] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0ae276-62ab-4375-9632-bf4d2c978dee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.614021] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ca8f06-1e4e-4a8f-bcdd-159a72bd9244 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.630733] env[62875]: DEBUG nova.compute.provider_tree [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.662936] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.699782] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Releasing lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.700373] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1861.701436] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1861.701580] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79a9342e-91d7-4a1d-928a-110f8dd48e65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.715033] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7bda39-31a0-4ad5-8e42-7a34fb734bbb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.744604] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 495c8c6c-f90e-4a26-a248-0672e08df66a could not be found. [ 1861.744862] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1861.745102] env[62875]: INFO nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1861.745341] env[62875]: DEBUG oslo.service.loopingcall [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.746034] env[62875]: DEBUG nova.compute.manager [-] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1861.746034] env[62875]: DEBUG nova.network.neutron [-] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1861.794484] env[62875]: DEBUG nova.network.neutron [-] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1861.911030] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.130022] env[62875]: INFO nova.scheduler.client.report [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Deleted allocations for instance f013c0a3-fb9c-480b-b479-e81c1b1e8234 [ 1862.135140] env[62875]: DEBUG nova.scheduler.client.report [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1862.167265] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Releasing lock "refresh_cache-24f59fd1-63bf-4292-81f1-dc762510151f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.167874] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1862.168098] env[62875]: DEBUG nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1862.168268] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1862.202646] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1862.300386] env[62875]: DEBUG nova.network.neutron [-] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.441020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "6eb92420-57b1-4a7d-973f-10cd47be0416" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.441020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "6eb92420-57b1-4a7d-973f-10cd47be0416" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.641901] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c7824e7a-1a6e-4201-bade-f18c767fc1c0 tempest-ListServersNegativeTestJSON-130907065 tempest-ListServersNegativeTestJSON-130907065-project-member] Lock "f013c0a3-fb9c-480b-b479-e81c1b1e8234" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.655s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.641901] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.645920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 
tempest-ServersAdminNegativeTestJSON-582189954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.449s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.667432] env[62875]: DEBUG nova.compute.manager [req-4235e395-4e66-4034-b4b5-edc4cc109f35 req-755e012b-78e2-436c-b3f4-f0b53fa28677 service nova] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Received event network-vif-deleted-956de09f-0d66-4f05-baad-cd8e38ba632b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1862.688528] env[62875]: INFO nova.scheduler.client.report [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Deleted allocations for instance f27770de-40f5-4d5a-8819-a62d8f9a320a [ 1862.708154] env[62875]: DEBUG nova.network.neutron [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.805793] env[62875]: INFO nova.compute.manager [-] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Took 1.06 seconds to deallocate network for instance. [ 1862.809444] env[62875]: DEBUG nova.compute.claims [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1862.809651] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.157708] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1863.202337] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b970d0a-1afb-4b74-bb9c-c7b803316e18 tempest-ServersAaction247Test-881912992 tempest-ServersAaction247Test-881912992-project-member] Lock "f27770de-40f5-4d5a-8819-a62d8f9a320a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.739s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.211223] env[62875]: INFO nova.compute.manager [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] [instance: 24f59fd1-63bf-4292-81f1-dc762510151f] Took 1.04 seconds to deallocate network for instance. 
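
Each "Inventory has not changed for provider ... based on inventory data" record restates the same payload for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02. Restating it as data makes the capacity arithmetic easy to check: placement treats usable capacity as (total - reserved) * allocation_ratio, bounded per allocation by max_unit. The helper below is an illustrative calculation over the logged payload, not placement code:

```python
# Inventory payload exactly as logged by the report client above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 175,   'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {usable:.0f} usable, max {inv['max_unit']} per allocation")
# VCPU: 192 usable       (48 physical cores at a 4.0 overcommit ratio)
# MEMORY_MB: 196078 usable
# DISK_GB: 400 usable    (but no single allocation may exceed 175 GB)
```
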
[ 1863.552245] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b17ea1-2adf-4cce-8ead-b648f90d4bfc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.565911] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2010334-3e80-48ae-9954-e32a5f38b335 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.597012] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3675dd-662f-4da9-8630-2d8bc76693ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.605358] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63129313-1086-415d-8324-ba4c9ef8acdf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.619868] env[62875]: DEBUG nova.compute.provider_tree [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1863.686724] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.124793] env[62875]: DEBUG nova.scheduler.client.report [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1864.258699] env[62875]: INFO nova.scheduler.client.report [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Deleted allocations for instance 24f59fd1-63bf-4292-81f1-dc762510151f [ 1864.632727] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.986s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.633621] env[62875]: ERROR nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 
tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information. [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Traceback (most recent call last): [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self.driver.spawn(context, instance, image_meta, [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] vm_ref = self.build_virtual_machine(instance, [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] vif_infos = vmwarevif.get_vif_info(self._session, [ 1864.633621] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] for vif in network_info: [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return self._sync_wrapper(fn, *args, **kwargs) [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self.wait() [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self[:] = self._gt.wait() [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return self._exit_event.wait() [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] result = hub.switch() [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1864.634485] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return self.greenlet.switch() [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] result = function(*args, **kwargs) [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] return func(*args, **kwargs) [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] raise e [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] nwinfo = self.network_api.allocate_for_instance( [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] created_port_ids = self._update_ports_for_instance( [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] with excutils.save_and_reraise_exception(): [ 1864.635935] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] self.force_reraise() [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] raise self.value [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] updated_port = self._update_port( [ 1864.636347] env[62875]: ERROR 
nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] _ensure_no_port_binding_failure(port) [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] raise exception.PortBindingFailed(port_id=port['id']) [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] nova.exception.PortBindingFailed: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information. [ 1864.636347] env[62875]: ERROR nova.compute.manager [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] [ 1864.636952] env[62875]: DEBUG nova.compute.utils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1864.636952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.425s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.636952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.636952] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1864.636952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.009s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.642204] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Build of instance ec33d0e1-4435-45c3-8ecf-33465cafda0e was re-scheduled: Binding failed for port b1b24ba0-e81a-43d1-9354-6c09b85dd533, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1864.642662] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1864.642887] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Acquiring lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.643039] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Acquired lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.643197] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1864.644713] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edd861f-e024-4d9a-a797-fdf8a24ba1a4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.659899] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b04e36-45ad-48a2-b5ec-6c37bd372218 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.680865] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e5ef8c-ec43-4fca-8162-7724eab8204e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.688636] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeac52e-95b2-4775-9bfb-3b2b2faabae5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.719925] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181214MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1864.719925] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.768890] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8cecad36-5765-481e-8a95-0816e1ba5e86 
tempest-FloatingIPsAssociationNegativeTestJSON-122506343 tempest-FloatingIPsAssociationNegativeTestJSON-122506343-project-member] Lock "24f59fd1-63bf-4292-81f1-dc762510151f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.397s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.180932] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1865.272141] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1865.288896] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.465837] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a06a6f9-fc20-4630-a682-169a2028f726 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.477409] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd222b8-ddb3-441a-a079-8e555daa5570 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.516420] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582078b0-222e-4100-b593-9417f762a284 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.528371] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44d35f8-0716-45f6-9929-7f5ec3ef1237 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.544734] env[62875]: DEBUG nova.compute.provider_tree [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.795035] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Releasing lock "refresh_cache-ec33d0e1-4435-45c3-8ecf-33465cafda0e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.795035] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 
tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1865.795035] env[62875]: DEBUG nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1865.795035] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1865.797353] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.819545] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1866.048269] env[62875]: DEBUG nova.scheduler.client.report [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1866.082557] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Acquiring lock "a19b7959-df3c-47e8-b920-edfe82c36489" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.082801] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Lock "a19b7959-df3c-47e8-b920-edfe82c36489" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
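
Both PortBindingFailed tracebacks above pass through excutils.save_and_reraise_exception(), which is why the oslo_utils/excutils.py frames (__exit__, force_reraise, raise self.value) sit between the Nova frames. A minimal sketch of that pattern, with cleanup() as a hypothetical stand-in for the port cleanup Nova performs on the error path:

```python
from oslo_utils import excutils

def cleanup():
    pass  # stand-in for deleting ports created before the failure

def update_ports():
    try:
        raise RuntimeError("binding failed")  # stand-in for PortBindingFailed
    except RuntimeError:
        with excutils.save_and_reraise_exception():
            cleanup()  # cleanup runs first; __exit__ re-raises the original

try:
    update_ports()
except RuntimeError as exc:
    print(f"re-raised after cleanup: {exc}")
```

Setting ctxt.reraise = False inside the block (via the context manager's as-target) would swallow the exception instead; leaving it to re-raise is what lets the build fail cleanly and be re-scheduled, as seen above.
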
[ 1866.328503] env[62875]: DEBUG nova.network.neutron [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.487604] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Acquiring lock "e11311ed-6804-4df4-a775-9060463ac927" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.487604] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Lock "e11311ed-6804-4df4-a775-9060463ac927" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.555019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.917s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.555019] env[62875]: ERROR nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. 
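
The traceback that follows this record, like the two complete ones above, climbs through eventlet frames (greenthread wait, event wait, hub.switch) because network allocation runs in a greenthread started early in the build, and the exception only surfaces when the result is finally waited on inside get_vif_info's iteration over network_info. A minimal sketch of that deferred-failure pattern, assuming eventlet is available:

```python
import eventlet

def allocate():
    raise RuntimeError("binding failed")  # stand-in for PortBindingFailed

gt = eventlet.spawn(allocate)  # started early, like _allocate_network_async
# ... the build continues until VIF info is actually needed ...
try:
    gt.wait()  # the greenthread's exception re-raises here, not at spawn()
except RuntimeError as exc:
    print(f"surfaced at wait(): {exc}")
```
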
[ 1866.555019] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] Traceback (most recent call last): [ 1866.555019] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1866.555019] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self.driver.spawn(context, instance, image_meta, [ 1866.555019] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1866.555019] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1866.555019] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1866.555019] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] vm_ref = self.build_virtual_machine(instance, [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] for vif in network_info: [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return self._sync_wrapper(fn, *args, **kwargs) [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self.wait() [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self[:] = self._gt.wait() [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return self._exit_event.wait() [ 1866.555348] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] result = hub.switch() [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return self.greenlet.switch() [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] result = function(*args, **kwargs) [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] return func(*args, **kwargs) [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] raise e [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] nwinfo = self.network_api.allocate_for_instance( [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1866.555699] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] created_port_ids = self._update_ports_for_instance( [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] with excutils.save_and_reraise_exception(): [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] self.force_reraise() [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] raise self.value [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] updated_port = self._update_port( [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] _ensure_no_port_binding_failure(port) [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1866.556057] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] raise exception.PortBindingFailed(port_id=port['id']) [ 1866.556383] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] nova.exception.PortBindingFailed: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. [ 1866.556383] env[62875]: ERROR nova.compute.manager [instance: d77191c5-8593-4730-8612-4877e059e7dc] [ 1866.556383] env[62875]: DEBUG nova.compute.utils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1866.558492] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.274s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.560723] env[62875]: INFO nova.compute.claims [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1866.564948] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Build of instance d77191c5-8593-4730-8612-4877e059e7dc was re-scheduled: Binding failed for port 005b6c7e-e1e5-474e-963d-bba5887c69db, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1866.567449] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1866.567449] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Acquiring lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.567449] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Acquired lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.567449] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1866.836293] env[62875]: INFO nova.compute.manager [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] [instance: ec33d0e1-4435-45c3-8ecf-33465cafda0e] Took 1.04 seconds to deallocate network for instance. [ 1867.056935] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Acquiring lock "d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.058616] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Lock "d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.129139] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1867.314360] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.816252] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Releasing lock "refresh_cache-d77191c5-8593-4730-8612-4877e059e7dc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.816532] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1867.816703] env[62875]: DEBUG nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1867.816921] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1867.871321] env[62875]: INFO nova.scheduler.client.report [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Deleted allocations for instance ec33d0e1-4435-45c3-8ecf-33465cafda0e [ 1867.959701] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3fdc33-3cc7-4a47-849e-8ffa20dd6a07 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.968148] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a51c6ac-4ee2-4567-b67e-b349bc1e50b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.006347] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57613526-85e1-4c99-abad-19218f9c21c4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.020020] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89726c29-7377-4fbe-bd09-42a7ab3f0276 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.030465] env[62875]: DEBUG nova.compute.provider_tree [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 
tempest-ImagesOneServerTestJSON-720048063-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.035531] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1868.037065] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "782e6663-202b-4ed0-8a1a-cc54f246143b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.038049] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "782e6663-202b-4ed0-8a1a-cc54f246143b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.380008] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db0a8958-6315-4df1-8cc6-ecf9dbb444e2 tempest-ServersAdminNegativeTestJSON-582189954 tempest-ServersAdminNegativeTestJSON-582189954-project-member] Lock "ec33d0e1-4435-45c3-8ecf-33465cafda0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.626s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.538599] env[62875]: DEBUG nova.scheduler.client.report [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1868.543515] env[62875]: DEBUG nova.network.neutron [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.885329] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1869.046607] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.047186] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1869.050552] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.137s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.054954] env[62875]: INFO nova.compute.manager [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] [instance: d77191c5-8593-4730-8612-4877e059e7dc] Took 1.24 seconds to deallocate network for instance. [ 1869.421957] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.563896] env[62875]: DEBUG nova.compute.utils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1869.571952] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1869.572147] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1869.649265] env[62875]: DEBUG nova.policy [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82aed3d31b5c47b8913e6b0104aad154', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91c657ed4043449087581cebdbc99259', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1869.952362] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307e45b8-fbbf-4c72-9030-25fe69e43ddd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.960859] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1d21f5-c09e-4fde-bb31-d89798f1a404 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.996469] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0967989-8029-4db4-b0cd-236ec74626aa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.004382] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd25e1ad-e28d-43af-ae55-0e9aea4bc006 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.020445] env[62875]: DEBUG nova.compute.provider_tree [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.074854] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1870.106943] env[62875]: INFO nova.scheduler.client.report [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Deleted allocations for instance d77191c5-8593-4730-8612-4877e059e7dc [ 1870.221775] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Acquiring lock "100c3541-3af3-4d3c-8060-2235f18f51e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.222025] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Lock "100c3541-3af3-4d3c-8060-2235f18f51e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.527140] env[62875]: DEBUG nova.scheduler.client.report [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1870.621794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1b0822fe-ee13-4a6f-ac38-07badf695264 tempest-ServerDiagnosticsTest-1602865755 tempest-ServerDiagnosticsTest-1602865755-project-member] Lock "d77191c5-8593-4730-8612-4877e059e7dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.287s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.649511] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Successfully created port: f322eec3-8899-4969-a7e2-1ae5502072ec {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1871.032460] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.982s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.033141] env[62875]: ERROR nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 
tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Traceback (most recent call last): [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self.driver.spawn(context, instance, image_meta, [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] vm_ref = self.build_virtual_machine(instance, [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] vif_infos = vmwarevif.get_vif_info(self._session, [ 1871.033141] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] for vif in network_info: [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return self._sync_wrapper(fn, *args, **kwargs) [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self.wait() [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self[:] = self._gt.wait() [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return self._exit_event.wait() [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1871.033524] env[62875]: 
ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] result = hub.switch() [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1871.033524] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return self.greenlet.switch() [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] result = function(*args, **kwargs) [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] return func(*args, **kwargs) [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] raise e [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] nwinfo = self.network_api.allocate_for_instance( [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] created_port_ids = self._update_ports_for_instance( [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] with excutils.save_and_reraise_exception(): [ 1871.033910] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] self.force_reraise() [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] raise self.value [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] updated_port = self._update_port( [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in 
_update_port [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] _ensure_no_port_binding_failure(port) [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] raise exception.PortBindingFailed(port_id=port['id']) [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] nova.exception.PortBindingFailed: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. [ 1871.034314] env[62875]: ERROR nova.compute.manager [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] [ 1871.034643] env[62875]: DEBUG nova.compute.utils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1871.035420] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.919s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.040933] env[62875]: INFO nova.compute.claims [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1871.046626] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Build of instance c7f0f27e-5bb6-4306-ab9f-282578d1cfce was re-scheduled: Binding failed for port 09a14792-afd5-4d6d-8a3e-6b7346b95ecc, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1871.046626] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1871.046626] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Acquiring lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.046626] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Acquired lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.046848] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1871.086201] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1871.122679] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1871.124251] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1871.124460] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1871.124662] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1871.124819] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1871.124960] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1871.125188] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1871.125446] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1871.125509] env[62875]: DEBUG 
nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1871.125718] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1871.125820] env[62875]: DEBUG nova.virt.hardware [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1871.126246] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1871.129634] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7207d42-ef6d-4424-a1d4-376b4982d6dc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.143364] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a807ba-406a-4a7a-9cda-3cfd5d77faff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.592626] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1871.661684] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.677645] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.005148] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bf9872dc-26c3-43f6-9c0f-2d9857ce6aff tempest-ServersListShow296Test-242798754 tempest-ServersListShow296Test-242798754-project-member] Acquiring lock "7d90a82f-3ee1-40c5-b351-87145b7b567c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.005389] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bf9872dc-26c3-43f6-9c0f-2d9857ce6aff tempest-ServersListShow296Test-242798754 tempest-ServersListShow296Test-242798754-project-member] Lock "7d90a82f-3ee1-40c5-b351-87145b7b567c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.180917] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Releasing lock "refresh_cache-c7f0f27e-5bb6-4306-ab9f-282578d1cfce" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.181931] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1872.181931] env[62875]: DEBUG nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1872.181931] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1872.218982] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1872.445758] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966ce34d-6fb4-47a6-9802-251c5f0f16eb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.454873] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce5c587-02a0-4a51-b406-328e46a336e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.152161] env[62875]: DEBUG nova.network.neutron [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.154395] env[62875]: ERROR nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. 
[ 1873.154395] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1873.154395] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1873.154395] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1873.154395] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1873.154395] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1873.154395] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1873.154395] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1873.154395] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1873.154395] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1873.154395] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1873.154395] env[62875]: ERROR nova.compute.manager raise self.value [ 1873.154395] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1873.154395] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1873.154395] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1873.154395] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1873.155291] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1873.155291] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1873.155291] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. 
[ 1873.155291] env[62875]: ERROR nova.compute.manager [ 1873.155291] env[62875]: Traceback (most recent call last): [ 1873.155291] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1873.155291] env[62875]: listener.cb(fileno) [ 1873.155291] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1873.155291] env[62875]: result = function(*args, **kwargs) [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1873.155291] env[62875]: return func(*args, **kwargs) [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1873.155291] env[62875]: raise e [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1873.155291] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1873.155291] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1873.155291] env[62875]: with excutils.save_and_reraise_exception(): [ 1873.155291] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1873.155291] env[62875]: self.force_reraise() [ 1873.155291] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1873.155291] env[62875]: raise self.value [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1873.155291] env[62875]: updated_port = self._update_port( [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1873.155291] env[62875]: _ensure_no_port_binding_failure(port) [ 1873.155291] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1873.155291] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1873.156914] env[62875]: nova.exception.PortBindingFailed: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. [ 1873.156914] env[62875]: Removing descriptor: 18 [ 1873.156914] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce229e0-cc3b-46d4-8e52-401a51e778b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.158376] env[62875]: ERROR nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. 
[ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] Traceback (most recent call last): [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] yield resources [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self.driver.spawn(context, instance, image_meta, [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] vm_ref = self.build_virtual_machine(instance, [ 1873.158376] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] for vif in network_info: [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return self._sync_wrapper(fn, *args, **kwargs) [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self.wait() [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self[:] = self._gt.wait() [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return self._exit_event.wait() [ 1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1873.159278] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] result = hub.switch() [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return self.greenlet.switch() [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] result = function(*args, **kwargs) [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return func(*args, **kwargs) [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] raise e [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] nwinfo = self.network_api.allocate_for_instance( [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] created_port_ids = self._update_ports_for_instance( [ 1873.159832] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] with excutils.save_and_reraise_exception(): [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self.force_reraise() [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] raise self.value [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] updated_port = self._update_port( [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] _ensure_no_port_binding_failure(port) [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] raise exception.PortBindingFailed(port_id=port['id']) [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] nova.exception.PortBindingFailed: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. [ 1873.160220] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] [ 1873.160617] env[62875]: INFO nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Terminating instance [ 1873.165584] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bc26aa-ea4e-4f55-a367-7c7a791ecdcf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.181106] env[62875]: DEBUG nova.compute.provider_tree [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1873.313529] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Acquiring lock "77d57f64-9bab-46f1-87b4-62bac5c5d2bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.313740] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Lock "77d57f64-9bab-46f1-87b4-62bac5c5d2bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.356948] env[62875]: DEBUG nova.compute.manager [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Received event network-changed-f322eec3-8899-4969-a7e2-1ae5502072ec {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1873.357413] env[62875]: DEBUG nova.compute.manager [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Refreshing instance network info cache due to event network-changed-f322eec3-8899-4969-a7e2-1ae5502072ec. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1873.357701] env[62875]: DEBUG oslo_concurrency.lockutils [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] Acquiring lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.357952] env[62875]: DEBUG oslo_concurrency.lockutils [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] Acquired lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.358377] env[62875]: DEBUG nova.network.neutron [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Refreshing network info cache for port f322eec3-8899-4969-a7e2-1ae5502072ec {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1873.662093] env[62875]: INFO nova.compute.manager [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] [instance: c7f0f27e-5bb6-4306-ab9f-282578d1cfce] Took 1.48 seconds to deallocate network for instance. [ 1873.667644] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Acquiring lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.682739] env[62875]: DEBUG nova.scheduler.client.report [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1873.884776] env[62875]: DEBUG nova.network.neutron [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1874.127170] env[62875]: DEBUG nova.network.neutron [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.187491] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.152s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.189632] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1874.191936] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.279s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.193020] env[62875]: INFO nova.compute.claims [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1874.628662] env[62875]: DEBUG oslo_concurrency.lockutils [req-84fc2c22-625f-432a-91ea-7c9f05e307be req-39174236-81d1-4dd3-95c5-308cdae44135 service nova] Releasing lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.629262] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Acquired lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.629482] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1874.699074] env[62875]: DEBUG nova.compute.utils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1874.705952] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 
tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1874.706141] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1874.726243] env[62875]: INFO nova.scheduler.client.report [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Deleted allocations for instance c7f0f27e-5bb6-4306-ab9f-282578d1cfce [ 1874.781156] env[62875]: DEBUG nova.policy [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e24ac140d88e4488a61e014a59008a0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c8d2c567cad46ce8f6bdf4541da4ea5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1875.156819] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1875.207121] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1875.235537] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9389d7f-76cb-478c-9490-412831b9554f tempest-TenantUsagesTestJSON-1193951861 tempest-TenantUsagesTestJSON-1193951861-project-member] Lock "c7f0f27e-5bb6-4306-ab9f-282578d1cfce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 49.889s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.291720] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.491305] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Successfully created port: c2c906c3-5e7b-42b6-9f48-6fa705fb4733 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1875.652988] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40eab4e4-4323-4ebd-81f4-81e53411abbe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.663210] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c671e53-0d5f-4d21-a271-e3c8b53f13cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.701146] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fe3f95-e402-4a06-807f-42db348428a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.710536] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca35afa-46f8-4689-9e1c-af170bb9c20d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.731124] env[62875]: DEBUG nova.compute.provider_tree [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.743892] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Starting instance...
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1875.795842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Releasing lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.796223] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1875.796513] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1875.797178] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dba6a44c-0e72-4b74-9e13-ff8d961a2e3a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.813286] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3fa54b-95aa-4037-baa1-58373efe15be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.842050] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cefcbb6a-378b-4927-b115-d648017502e9 could not be found. [ 1875.842328] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1875.842807] env[62875]: INFO nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1875.842845] env[62875]: DEBUG oslo.service.loopingcall [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1875.843152] env[62875]: DEBUG nova.compute.manager [-] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1875.843229] env[62875]: DEBUG nova.network.neutron [-] [instance: cefcbb6a-378b-4927-b115-d648017502e9] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1875.866818] env[62875]: DEBUG nova.network.neutron [-] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1876.185734] env[62875]: DEBUG nova.compute.manager [req-d74ba753-389d-4889-a59d-b1305c482e78 req-a8a23fa9-6c1e-447d-91b2-bd0029f461d4 service nova] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Received event network-vif-deleted-f322eec3-8899-4969-a7e2-1ae5502072ec {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1876.221993] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1876.234230] env[62875]: DEBUG nova.scheduler.client.report [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1876.252581] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1876.252822] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 
tempest-VolumesAdminNegativeTest-897597342-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1876.252977] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1876.253176] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1876.253320] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1876.253462] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1876.253658] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1876.253813] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1876.253978] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1876.254233] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1876.254329] env[62875]: DEBUG nova.virt.hardware [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1876.255503] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1ab4c4-7017-4853-8b34-38c45a09acf5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.269100] env[62875]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27e9897-460a-4985-9595-0324e7c37934 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.283947] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.372430] env[62875]: DEBUG nova.network.neutron [-] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.739679] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.740349] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1876.746550] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.836s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.748637] env[62875]: INFO nova.compute.claims [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1876.876956] env[62875]: INFO nova.compute.manager [-] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Took 1.03 seconds to deallocate network for instance. 
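The nova.virt.hardware DEBUG walk above ("Build topologies for 1 vcpu(s) 1:1:1" through "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerates (sockets, cores, threads) factorizations of the vCPU count within the logged 65536-per-axis limits. An illustrative sketch of that enumeration, not nova's actual implementation:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) triple whose product is the
    # vCPU count and which respects the per-axis limits from the log.
    return [(s, c, t)
            for s in range(1, min(vcpus, max_sockets) + 1)
            for c in range(1, min(vcpus, max_cores) + 1)
            for t in range(1, min(vcpus, max_threads) + 1)
            if s * c * t == vcpus]

print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"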
[ 1876.879642] env[62875]: DEBUG nova.compute.claims [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1876.879833] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.042774] env[62875]: ERROR nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. [ 1877.042774] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1877.042774] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1877.042774] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1877.042774] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1877.042774] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1877.042774] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1877.042774] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1877.042774] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1877.042774] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1877.042774] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1877.042774] env[62875]: ERROR nova.compute.manager raise self.value [ 1877.042774] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1877.042774] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1877.042774] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1877.042774] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1877.043146] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1877.043146] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1877.043146] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. 
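The excutils.py frames (__exit__ / force_reraise) that appear in every traceback here come from oslo.utils' save_and_reraise_exception context manager, which lets cleanup code run inside an except block and then re-raises the in-flight exception. A minimal usage sketch with a simulated failure (the function name is hypothetical):

from oslo_utils import excutils

def update_port_with_rollback():
    try:
        raise RuntimeError("simulated port update failure")
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; leaving the with-block calls
            # force_reraise(), producing exactly the excutils frames
            # seen in the tracebacks above.
            print("rolling back before re-raise")

try:
    update_port_with_rollback()
except RuntimeError as exc:
    print("re-raised:", exc)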
[ 1877.043146] env[62875]: ERROR nova.compute.manager [ 1877.043146] env[62875]: Traceback (most recent call last): [ 1877.043146] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1877.043146] env[62875]: listener.cb(fileno) [ 1877.043146] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1877.043146] env[62875]: result = function(*args, **kwargs) [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1877.043146] env[62875]: return func(*args, **kwargs) [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1877.043146] env[62875]: raise e [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1877.043146] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1877.043146] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1877.043146] env[62875]: with excutils.save_and_reraise_exception(): [ 1877.043146] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1877.043146] env[62875]: self.force_reraise() [ 1877.043146] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1877.043146] env[62875]: raise self.value [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1877.043146] env[62875]: updated_port = self._update_port( [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1877.043146] env[62875]: _ensure_no_port_binding_failure(port) [ 1877.043146] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1877.043146] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1877.043743] env[62875]: nova.exception.PortBindingFailed: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. [ 1877.043743] env[62875]: Removing descriptor: 18 [ 1877.043743] env[62875]: ERROR nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. 
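The inventory payload the resource tracker keeps reporting as unchanged (above and again further down) determines schedulable capacity via the placement formula (total - reserved) * allocation_ratio: with the logged values, 192 VCPU, 196078 MB of RAM, and 400 GB of disk. Worked out with the figures copied from the log:

# Values copied verbatim from the "Inventory has not changed" records.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400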
[ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Traceback (most recent call last): [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] yield resources [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self.driver.spawn(context, instance, image_meta, [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1877.043743] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] vm_ref = self.build_virtual_machine(instance, [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] vif_infos = vmwarevif.get_vif_info(self._session, [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] for vif in network_info: [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return self._sync_wrapper(fn, *args, **kwargs) [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self.wait() [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self[:] = self._gt.wait() [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return self._exit_event.wait() [ 1877.043974] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] result = hub.switch() [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return self.greenlet.switch() [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] result = function(*args, **kwargs) [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return func(*args, **kwargs) [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] raise e [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] nwinfo = self.network_api.allocate_for_instance( [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1877.045060] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] created_port_ids = self._update_ports_for_instance( [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] with excutils.save_and_reraise_exception(): [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self.force_reraise() [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] raise self.value [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] updated_port = self._update_port( [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] _ensure_no_port_binding_failure(port) [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1877.045333] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] raise exception.PortBindingFailed(port_id=port['id']) [ 1877.045640] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] nova.exception.PortBindingFailed: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. [ 1877.045640] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] [ 1877.045640] env[62875]: INFO nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Terminating instance [ 1877.254026] env[62875]: DEBUG nova.compute.utils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1877.254367] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1877.254538] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1877.379203] env[62875]: DEBUG nova.policy [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53e9da2899614aebb3c9ddfbfde7cce6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59aacbcdd3fe493cb253d4c6b6df258f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1877.558041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.558041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquired lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.558041] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1877.757620] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1878.079377] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1878.171401] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f74ab1a-ece5-4598-9b17-88f70e48c7af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.181190] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2a64dc-b668-43c6-8c5e-791338e5a61e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.187848] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.217105] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d649f5-7b13-4714-8d61-46c463d78c06 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.227501] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fc5802-33c7-474f-b979-0855630f2dd8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.233027] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Successfully created port: 60bd0332-d45a-4167-a467-15d844758709 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1878.246962] env[62875]: DEBUG nova.compute.provider_tree [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1878.716654] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Releasing lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.719157] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1878.719157] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1878.719157] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dd38820-dc06-4d05-9142-7d538fd6e23c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.732074] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480b0de4-ff2c-49e7-88f7-80f13b5c132e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.750501] env[62875]: DEBUG nova.scheduler.client.report [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1878.764182] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9abafa57-9674-45f7-90cd-f80a8c80b567 could not be found. [ 1878.764182] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1878.764182] env[62875]: INFO nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1878.764182] env[62875]: DEBUG oslo.service.loopingcall [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.764182] env[62875]: DEBUG nova.compute.manager [-] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1878.764182] env[62875]: DEBUG nova.network.neutron [-] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1878.765989] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1878.787414] env[62875]: DEBUG nova.network.neutron [-] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1878.803910] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1878.804191] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1878.804348] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1878.804527] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1878.804675] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1878.804821] env[62875]: DEBUG nova.virt.hardware [None 
req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1878.805035] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1878.805201] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1878.805365] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1878.805637] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1878.805703] env[62875]: DEBUG nova.virt.hardware [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1878.806777] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f1567f-3b44-4680-930c-5b996e0c9f53 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.814768] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e13371-3cf9-4eb1-a7c5-ce2d6274313e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.265470] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.265470] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1879.269094] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.458s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.294669] env[62875]: DEBUG nova.network.neutron [-] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.775062] env[62875]: DEBUG nova.compute.utils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1879.780158] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1879.780252] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1879.798421] env[62875]: INFO nova.compute.manager [-] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Took 1.04 seconds to deallocate network for instance. 
[ 1879.803441] env[62875]: DEBUG nova.compute.claims [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1879.803713] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.857475] env[62875]: DEBUG nova.policy [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce16af99c7094da8852de22f8a0c889c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cba5539c7f7d4a67bba8346a05d819d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1880.088820] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fe2908-389c-49c1-8d94-e215dc2684ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.098123] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd348d0-ba65-4589-9630-9ac9f137c79b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.136678] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03ee745-2ae9-4726-88be-b32aad026881 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.145238] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3278cc4e-55e3-43e7-a9e8-038180bae52b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.159120] env[62875]: DEBUG nova.compute.provider_tree [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1880.281264] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1880.294131] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Successfully created port: 59ae6b07-482d-4aa5-a5f5-77d100c34f3f {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1880.666820] env[62875]: DEBUG nova.scheduler.client.report [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1880.810821] env[62875]: DEBUG nova.compute.manager [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Received event network-changed-c2c906c3-5e7b-42b6-9f48-6fa705fb4733 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1880.811096] env[62875]: DEBUG nova.compute.manager [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Refreshing instance network info cache due to event network-changed-c2c906c3-5e7b-42b6-9f48-6fa705fb4733. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1880.811253] env[62875]: DEBUG oslo_concurrency.lockutils [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] Acquiring lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.811400] env[62875]: DEBUG oslo_concurrency.lockutils [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] Acquired lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.811563] env[62875]: DEBUG nova.network.neutron [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Refreshing network info cache for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1880.947322] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "5f190f86-2faa-4b8e-821f-2113577541e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.947322] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "5f190f86-2faa-4b8e-821f-2113577541e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.172799] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.905s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.173545] env[62875]: ERROR nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information.
[ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Traceback (most recent call last): [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self.driver.spawn(context, instance, image_meta, [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] vm_ref = self.build_virtual_machine(instance, [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] vif_infos = vmwarevif.get_vif_info(self._session, [ 1881.173545] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] for vif in network_info: [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return self._sync_wrapper(fn, *args, **kwargs) [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self.wait() [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self[:] = self._gt.wait() [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return self._exit_event.wait() [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] result = hub.switch() [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1881.174041] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return self.greenlet.switch() [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] result = function(*args, **kwargs) [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] return func(*args, **kwargs) [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] raise e [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] nwinfo = self.network_api.allocate_for_instance( [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] created_port_ids = self._update_ports_for_instance( [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] with excutils.save_and_reraise_exception(): [ 1881.174307] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] self.force_reraise() [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] raise self.value [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] updated_port = self._update_port( [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] _ensure_no_port_binding_failure(port) [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] raise exception.PortBindingFailed(port_id=port['id']) [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] nova.exception.PortBindingFailed: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. [ 1881.174571] env[62875]: ERROR nova.compute.manager [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] [ 1881.175828] env[62875]: DEBUG nova.compute.utils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1881.178520] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Build of instance 495c8c6c-f90e-4a26-a248-0672e08df66a was re-scheduled: Binding failed for port 956de09f-0d66-4f05-baad-cd8e38ba632b, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1881.179234] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1881.179737] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.179737] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquired lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.179817] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1881.181590] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.495s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.183205] env[62875]: INFO nova.compute.claims [None 
req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1881.188303] env[62875]: ERROR nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. [ 1881.188303] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1881.188303] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1881.188303] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1881.188303] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1881.188303] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1881.188303] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1881.188303] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1881.188303] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1881.188303] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1881.188303] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1881.188303] env[62875]: ERROR nova.compute.manager raise self.value [ 1881.188303] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1881.188303] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1881.188303] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1881.188303] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1881.189370] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1881.189370] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1881.189370] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. 
[ 1881.189370] env[62875]: ERROR nova.compute.manager [ 1881.189370] env[62875]: Traceback (most recent call last): [ 1881.189370] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1881.189370] env[62875]: listener.cb(fileno) [ 1881.189370] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1881.189370] env[62875]: result = function(*args, **kwargs) [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1881.189370] env[62875]: return func(*args, **kwargs) [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1881.189370] env[62875]: raise e [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1881.189370] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1881.189370] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1881.189370] env[62875]: with excutils.save_and_reraise_exception(): [ 1881.189370] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1881.189370] env[62875]: self.force_reraise() [ 1881.189370] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1881.189370] env[62875]: raise self.value [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1881.189370] env[62875]: updated_port = self._update_port( [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1881.189370] env[62875]: _ensure_no_port_binding_failure(port) [ 1881.189370] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1881.189370] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1881.190403] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. [ 1881.190403] env[62875]: Removing descriptor: 21 [ 1881.294869] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1881.324837] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1881.325160] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1881.325330] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1881.325518] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1881.325666] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1881.325815] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1881.326113] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1881.326314] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875)
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1881.326472] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1881.327447] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1881.327447] env[62875]: DEBUG nova.virt.hardware [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1881.327674] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2001756-d293-40b4-b56e-9e21530d2e21 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.337025] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c57da07-637e-4b4c-980e-b46173593726 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.353553] env[62875]: ERROR nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. 
[ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Traceback (most recent call last): [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] yield resources [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self.driver.spawn(context, instance, image_meta, [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] vm_ref = self.build_virtual_machine(instance, [ 1881.353553] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] vif_infos = vmwarevif.get_vif_info(self._session, [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] for vif in network_info: [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] return self._sync_wrapper(fn, *args, **kwargs) [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self.wait() [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self[:] = self._gt.wait() [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] return self._exit_event.wait() [ 1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1881.353851] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] current.throw(*self._exc) [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] result = function(*args, **kwargs) [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] return func(*args, **kwargs) [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] raise e [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] nwinfo = self.network_api.allocate_for_instance( [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] created_port_ids = self._update_ports_for_instance( [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] with excutils.save_and_reraise_exception(): [ 1881.354776] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self.force_reraise() [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] raise self.value [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] updated_port = self._update_port( [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] _ensure_no_port_binding_failure(port) [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] raise exception.PortBindingFailed(port_id=port['id']) [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] nova.exception.PortBindingFailed: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. [ 1881.355198] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] [ 1881.355198] env[62875]: INFO nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Terminating instance [ 1881.365256] env[62875]: DEBUG nova.network.neutron [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1881.485503] env[62875]: DEBUG nova.network.neutron [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.622145] env[62875]: ERROR nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. 
[ 1881.622145] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1881.622145] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1881.622145] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1881.622145] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1881.622145] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1881.622145] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1881.622145] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1881.622145] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1881.622145] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1881.622145] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1881.622145] env[62875]: ERROR nova.compute.manager raise self.value [ 1881.622145] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1881.622145] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1881.622145] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1881.622145] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1881.622489] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1881.622489] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1881.622489] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. 
[ 1881.622489] env[62875]: ERROR nova.compute.manager [ 1881.622489] env[62875]: Traceback (most recent call last): [ 1881.622489] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1881.622489] env[62875]: listener.cb(fileno) [ 1881.622489] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1881.622489] env[62875]: result = function(*args, **kwargs) [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1881.622489] env[62875]: return func(*args, **kwargs) [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1881.622489] env[62875]: raise e [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1881.622489] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1881.622489] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1881.622489] env[62875]: with excutils.save_and_reraise_exception(): [ 1881.622489] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1881.622489] env[62875]: self.force_reraise() [ 1881.622489] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1881.622489] env[62875]: raise self.value [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1881.622489] env[62875]: updated_port = self._update_port( [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1881.622489] env[62875]: _ensure_no_port_binding_failure(port) [ 1881.622489] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1881.622489] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1881.623077] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. [ 1881.623077] env[62875]: Removing descriptor: 18 [ 1881.623077] env[62875]: ERROR nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. 
[ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Traceback (most recent call last): [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] yield resources [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self.driver.spawn(context, instance, image_meta, [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1881.623077] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] vm_ref = self.build_virtual_machine(instance, [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] vif_infos = vmwarevif.get_vif_info(self._session, [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] for vif in network_info: [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return self._sync_wrapper(fn, *args, **kwargs) [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self.wait() [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self[:] = self._gt.wait() [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return self._exit_event.wait() [ 1881.623328] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] result = hub.switch() [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return self.greenlet.switch() [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] result = function(*args, **kwargs) [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return func(*args, **kwargs) [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] raise e [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] nwinfo = self.network_api.allocate_for_instance( [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1881.623592] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] created_port_ids = self._update_ports_for_instance( [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] with excutils.save_and_reraise_exception(): [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self.force_reraise() [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] raise self.value [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] updated_port = self._update_port( [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] _ensure_no_port_binding_failure(port) [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1881.624011] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] raise exception.PortBindingFailed(port_id=port['id']) [ 1881.624298] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] nova.exception.PortBindingFailed: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. [ 1881.624298] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] [ 1881.624298] env[62875]: INFO nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Terminating instance [ 1881.712907] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1881.833551] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.858373] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Acquiring lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.860235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Acquired lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.860235] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1881.987414] env[62875]: DEBUG oslo_concurrency.lockutils [req-d0e57477-b562-456f-87fd-e214790bb93f req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] Releasing lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.987757] env[62875]: DEBUG nova.compute.manager [req-d0e57477-b562-456f-87fd-e214790bb93f 
req-3683eed6-33fe-4110-b85b-41f437a608bd service nova] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Received event network-vif-deleted-c2c906c3-5e7b-42b6-9f48-6fa705fb4733 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1882.133275] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Acquiring lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.133480] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Acquired lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.133661] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1882.310172] env[62875]: DEBUG nova.compute.manager [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Received event network-changed-59ae6b07-482d-4aa5-a5f5-77d100c34f3f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1882.310252] env[62875]: DEBUG nova.compute.manager [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Refreshing instance network info cache due to event network-changed-59ae6b07-482d-4aa5-a5f5-77d100c34f3f. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1882.310445] env[62875]: DEBUG oslo_concurrency.lockutils [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] Acquiring lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.336475] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Releasing lock "refresh_cache-495c8c6c-f90e-4a26-a248-0672e08df66a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.336720] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1882.337650] env[62875]: DEBUG nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1882.337650] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1882.377672] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1882.380079] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1882.479324] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.623592] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba40d6f2-e561-463a-861a-c69fa7d08e6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.633100] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0766c3-1a3c-4f72-a2f2-af442a6b7ccd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.666609] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1052836-6c19-455f-aaec-be5b16bd56be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.675388] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc84024-56db-4d68-ae60-4eeb81cab9b5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.688910] env[62875]: DEBUG nova.compute.provider_tree [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1882.690945] env[62875]: DEBUG nova.network.neutron [None 
req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1882.745591] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.885607] env[62875]: DEBUG nova.network.neutron [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.984746] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Releasing lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.985368] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1882.987182] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1882.987182] env[62875]: DEBUG oslo_concurrency.lockutils [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] Acquired lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.987182] env[62875]: DEBUG nova.network.neutron [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Refreshing network info cache for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1882.989203] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a827792-9f73-4705-b6bd-b7ecb0a4a9ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.002990] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be19f002-f189-4870-bd5e-c59b79e01b4b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.029858] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 79d018cc-2400-4925-a09f-e0aaaa8b22db could not be found. [ 1883.030132] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1883.030343] env[62875]: INFO nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1883.030580] env[62875]: DEBUG oslo.service.loopingcall [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1883.030799] env[62875]: DEBUG nova.compute.manager [-] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1883.030900] env[62875]: DEBUG nova.network.neutron [-] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1883.049529] env[62875]: DEBUG nova.network.neutron [-] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1883.193821] env[62875]: DEBUG nova.scheduler.client.report [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1883.207239] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "6f936641-750d-49ae-8beb-bca35305d10d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.207512] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "6f936641-750d-49ae-8beb-bca35305d10d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.249036] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Releasing lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.249509] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Start destroying the instance on the hypervisor.
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1883.249715] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1883.250009] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba4dd218-9c68-49d3-967d-1444f4b14686 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.260892] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad29bf7-dfff-48bf-84cc-6b21e2493d65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.290046] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f could not be found. [ 1883.290212] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1883.290402] env[62875]: INFO nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1883.290648] env[62875]: DEBUG oslo.service.loopingcall [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1883.290932] env[62875]: DEBUG nova.compute.manager [-] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1883.290996] env[62875]: DEBUG nova.network.neutron [-] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1883.309960] env[62875]: DEBUG nova.network.neutron [-] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1883.389598] env[62875]: INFO nova.compute.manager [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: 495c8c6c-f90e-4a26-a248-0672e08df66a] Took 1.05 seconds to deallocate network for instance.
[ 1883.520260] env[62875]: DEBUG nova.network.neutron [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1883.556458] env[62875]: DEBUG nova.network.neutron [-] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.654562] env[62875]: DEBUG nova.network.neutron [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.708806] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.527s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.709293] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1883.714871] env[62875]: DEBUG nova.compute.manager [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Received event network-changed-60bd0332-d45a-4167-a467-15d844758709 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1883.715065] env[62875]: DEBUG nova.compute.manager [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Refreshing instance network info cache due to event network-changed-60bd0332-d45a-4167-a467-15d844758709. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1883.715284] env[62875]: DEBUG oslo_concurrency.lockutils [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] Acquiring lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.715436] env[62875]: DEBUG oslo_concurrency.lockutils [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] Acquired lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.715665] env[62875]: DEBUG nova.network.neutron [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Refreshing network info cache for port 60bd0332-d45a-4167-a467-15d844758709 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1883.716832] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.997s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.736019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.736019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.809641] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "41ec8810-3f17-4f59-9828-a4a2e873eab4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.809951] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "41ec8810-3f17-4f59-9828-a4a2e873eab4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.815710] env[62875]: DEBUG nova.network.neutron [-] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info
/opt/stack/nova/nova/network/neutron.py:116}} [ 1884.059673] env[62875]: INFO nova.compute.manager [-] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Took 1.03 seconds to deallocate network for instance. [ 1884.067626] env[62875]: DEBUG nova.compute.claims [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1884.070173] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.160229] env[62875]: DEBUG oslo_concurrency.lockutils [req-16c33057-8b1a-4ed9-b082-de86dff29e6a req-00d9c7d3-8ed6-426d-908a-45821691a778 service nova] Releasing lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.218924] env[62875]: DEBUG nova.compute.utils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1884.232189] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1884.232447] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1884.258909] env[62875]: DEBUG nova.network.neutron [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1884.311178] env[62875]: DEBUG nova.network.neutron [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.315732] env[62875]: DEBUG nova.policy [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27c4cef59626498dbacd8dea053943fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca934e214e3447cb85b8a3d9ff8cb4e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1884.320023] env[62875]: INFO nova.compute.manager [-] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Took 1.03 seconds to deallocate network for instance. [ 1884.322912] env[62875]: DEBUG nova.compute.claims [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1884.323208] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.432485] env[62875]: INFO nova.scheduler.client.report [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Deleted allocations for instance 495c8c6c-f90e-4a26-a248-0672e08df66a [ 1884.738702] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1884.767023] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance cefcbb6a-378b-4927-b115-d648017502e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.767482] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9abafa57-9674-45f7-90cd-f80a8c80b567 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.767482] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.767482] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 79d018cc-2400-4925-a09f-e0aaaa8b22db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.767614] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 4f036275-fd33-440d-acba-6e475cda62c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.823637] env[62875]: DEBUG oslo_concurrency.lockutils [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] Releasing lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.824120] env[62875]: DEBUG nova.compute.manager [req-3a762371-6bf6-403a-9f04-e49d33626796 req-559581d6-91b5-4837-83ef-5510b465cb0f service nova] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Received event network-vif-deleted-60bd0332-d45a-4167-a467-15d844758709 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1884.900794] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Successfully created port: 32b18cd3-4268-404f-92d1-574827f7d571 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1884.924466] env[62875]: DEBUG nova.compute.manager [req-8247514d-6d5f-44de-9497-7700720154c1 req-5bd28952-4795-4cc8-b3c1-89b5573b1cb5 service nova] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Received event network-vif-deleted-59ae6b07-482d-4aa5-a5f5-77d100c34f3f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1884.946294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfde839a-b7c5-4262-a363-8eb4091573b1 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "495c8c6c-f90e-4a26-a248-0672e08df66a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 58.267s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.272466] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance edf6724b-3a8c-4c19-926e-8f1b081ab50f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the
instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1885.452338] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1885.567557] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Acquiring lock "5cf1f620-d0da-4e81-8d1f-e881c47dcad1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.567823] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Lock "5cf1f620-d0da-4e81-8d1f-e881c47dcad1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.753009] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1885.778588] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 820d7177-3e8f-4dd7-b7c3-c7abd4a62158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1885.789389] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T05:11:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='130174483',id=24,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1615477237',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1885.789632] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1885.789799] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1885.789989] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1885.790177] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1885.790333] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1885.790537] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1885.790696] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874
tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1885.790860] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1885.791033] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1885.791218] env[62875]: DEBUG nova.virt.hardware [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1885.792100] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42701e7e-7627-4ab8-80aa-23e8a22a6c81 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.800929] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fc8ccc-825a-4b4a-9782-008ba88295bc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.986523] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.278935] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2896c309-a702-498b-8b44-f01620b597e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1886.337193] env[62875]: DEBUG nova.compute.manager [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Received event network-changed-32b18cd3-4268-404f-92d1-574827f7d571 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1886.337789] env[62875]: DEBUG nova.compute.manager [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Refreshing instance network info cache due to event network-changed-32b18cd3-4268-404f-92d1-574827f7d571. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1886.337789] env[62875]: DEBUG oslo_concurrency.lockutils [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] Acquiring lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.337913] env[62875]: DEBUG oslo_concurrency.lockutils [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] Acquired lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.338517] env[62875]: DEBUG nova.network.neutron [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Refreshing network info cache for port 32b18cd3-4268-404f-92d1-574827f7d571 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1886.500368] env[62875]: ERROR nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. [ 1886.500368] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1886.500368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1886.500368] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1886.500368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1886.500368] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1886.500368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1886.500368] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1886.500368] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1886.500368] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1886.500368] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1886.500368] env[62875]: ERROR nova.compute.manager raise self.value [ 1886.500368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1886.500368] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1886.500368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1886.500368] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1886.500748] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1886.500748] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1886.500748] env[62875]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. [ 1886.500748] env[62875]: ERROR nova.compute.manager [ 1886.500748] env[62875]: Traceback (most recent call last): [ 1886.500748] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1886.500748] env[62875]: listener.cb(fileno) [ 1886.500748] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1886.500748] env[62875]: result = function(*args, **kwargs) [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1886.500748] env[62875]: return func(*args, **kwargs) [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1886.500748] env[62875]: raise e [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1886.500748] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1886.500748] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1886.500748] env[62875]: with excutils.save_and_reraise_exception(): [ 1886.500748] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1886.500748] env[62875]: self.force_reraise() [ 1886.500748] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1886.500748] env[62875]: raise self.value [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1886.500748] env[62875]: updated_port = self._update_port( [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1886.500748] env[62875]: _ensure_no_port_binding_failure(port) [ 1886.500748] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1886.500748] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1886.501399] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. [ 1886.501399] env[62875]: Removing descriptor: 18 [ 1886.501399] env[62875]: ERROR nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. 
[ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Traceback (most recent call last): [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] yield resources [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self.driver.spawn(context, instance, image_meta, [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1886.501399] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] vm_ref = self.build_virtual_machine(instance, [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] vif_infos = vmwarevif.get_vif_info(self._session, [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] for vif in network_info: [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return self._sync_wrapper(fn, *args, **kwargs) [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self.wait() [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self[:] = self._gt.wait() [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return self._exit_event.wait() [ 1886.501658] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] result = hub.switch() [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return self.greenlet.switch() [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] result = function(*args, **kwargs) [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return func(*args, **kwargs) [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] raise e [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] nwinfo = self.network_api.allocate_for_instance( [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1886.501928] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] created_port_ids = self._update_ports_for_instance( [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] with excutils.save_and_reraise_exception(): [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self.force_reraise() [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] raise self.value [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] updated_port = self._update_port( [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] _ensure_no_port_binding_failure(port) [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1886.502207] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] raise exception.PortBindingFailed(port_id=port['id']) [ 1886.502453] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] nova.exception.PortBindingFailed: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. [ 1886.502453] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] [ 1886.502453] env[62875]: INFO nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Terminating instance [ 1886.783732] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 74a1c3db-26b1-426e-be47-621c79ef9029 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1886.862176] env[62875]: DEBUG nova.network.neutron [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1886.975160] env[62875]: DEBUG nova.network.neutron [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1887.006277] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Acquiring lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.286522] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance ec4405a5-de44-4749-9225-3945db05ca6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1887.477626] env[62875]: DEBUG oslo_concurrency.lockutils [req-512d9f5b-cfd9-46b5-9ae2-2c3fd46befa3 req-9f7c7fce-642f-4d82-be00-1d11f09e5aa8 service nova] Releasing lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.478095] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Acquired lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.478301] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1887.791947] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7d241bf8-6f7a-467f-9640-a9819d5cca72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1887.966521] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "fd7ba11a-18d1-4f96-a445-eedce740b0c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.966810] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "fd7ba11a-18d1-4f96-a445-eedce740b0c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.012323] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1888.299148] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 793da91d-461a-465b-b9a3-c5fa0f5b877d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1888.299148] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.792989] env[62875]: DEBUG nova.compute.manager [req-1c8e2f9e-38e6-44a3-b8c1-ce13bbf7de3d req-570a59f9-5cd3-41a1-a3a8-e6927594fb92 service nova] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Received event network-vif-deleted-32b18cd3-4268-404f-92d1-574827f7d571 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1888.806299] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 4ee68d78-b265-4ee8-afcc-ce6ed150fb73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1888.807709] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Releasing lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.808127] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1888.808352] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1888.808788] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5e85bda-b14e-44f4-b908-b294c4d92341 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.825575] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927a5474-f15e-4090-8f60-54d619e5fc9f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.862338] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4f036275-fd33-440d-acba-6e475cda62c2 could not be found. 
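The WARNING record just above, together with the "Instance destroyed" DEBUG that follows it, shows an idempotent-teardown idiom: spawn aborted on the port-binding failure before any VM was created in vCenter, so InstanceNotFound during destroy is swallowed and cleanup proceeds to network deallocation. A minimal sketch of that idiom under assumed names (this is not the nova.virt.vmwareapi source):

    import logging

    logging.basicConfig(level=logging.WARNING, format="%(levelname)s %(message)s")
    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Hypothetical stand-in for nova.exception.InstanceNotFound."""

    class MissingBackend:
        """Fake hypervisor backend: the VM was never created, so every
        delete attempt raises InstanceNotFound."""
        def delete_vm(self, uuid):
            raise InstanceNotFound(uuid)

    def destroy(backend, uuid):
        # Idempotent teardown: a VM that never existed counts as destroyed,
        # so the caller can still deallocate ports, claims, and allocations.
        try:
            backend.delete_vm(uuid)
        except InstanceNotFound:
            LOG.warning("Instance does not exist on backend: %s", uuid)
        LOG.debug("Instance destroyed")

    destroy(MissingBackend(), "4f036275-fd33-440d-acba-6e475cda62c2")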
[ 1888.864579] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1888.864579] env[62875]: INFO nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1888.864579] env[62875]: DEBUG oslo.service.loopingcall [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1888.864579] env[62875]: DEBUG nova.compute.manager [-] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1888.864579] env[62875]: DEBUG nova.network.neutron [-] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1888.889078] env[62875]: DEBUG nova.network.neutron [-] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1889.310593] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1889.392043] env[62875]: DEBUG nova.network.neutron [-] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.819776] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 6eb92420-57b1-4a7d-973f-10cd47be0416 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1889.894953] env[62875]: INFO nova.compute.manager [-] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Took 1.03 seconds to deallocate network for instance. 
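The claim-abort records below, and the "compute_resources" acquire/release pairs throughout this section (held 12.634s by the tracker's periodic update, waited 30.554s by an instance claim), come from a single serialization point: every mutation of tracked resources runs under one named oslo.concurrency lock. A hedged sketch of that pattern; the function body and figures are illustrative, not the ResourceTracker source:

    from oslo_concurrency import lockutils

    # One named lock serializes all tracker mutations; each call produces the
    # paired 'Lock "compute_resources" acquired ... waited Ns' and
    # '"released" ... held Ns' records seen in this log.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(usage, claimed):
        # Return a failed instance's claimed resources to the pool.
        for resource, amount in claimed.items():
            usage[resource] -= amount

    # Figures taken from the "Final resource view" record in this section.
    usage = {'MEMORY_MB': 1472, 'DISK_GB': 5, 'VCPU': 5}
    abort_instance_claim(usage, {'MEMORY_MB': 192, 'DISK_GB': 1, 'VCPU': 1})
    print(usage)  # {'MEMORY_MB': 1280, 'DISK_GB': 4, 'VCPU': 4}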
[ 1889.897422] env[62875]: DEBUG nova.compute.claims [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1889.897577] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.335693] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a19b7959-df3c-47e8-b920-edfe82c36489 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1890.840422] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance e11311ed-6804-4df4-a775-9060463ac927 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1891.344842] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d3270b4a-2b81-41f5-a2af-5b7f441e4a2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1891.850887] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 782e6663-202b-4ed0-8a1a-cc54f246143b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1891.984624] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "7969485a-ccd6-48e0-bdea-b8920af28843" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.984624] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "7969485a-ccd6-48e0-bdea-b8920af28843" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.355368] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 100c3541-3af3-4d3c-8060-2235f18f51e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1892.859707] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 77d57f64-9bab-46f1-87b4-62bac5c5d2bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1893.368671] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 5f190f86-2faa-4b8e-821f-2113577541e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1893.871836] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 6f936641-750d-49ae-8beb-bca35305d10d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1894.375935] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1894.881875] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 41ec8810-3f17-4f59-9828-a4a2e873eab4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1894.881875] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1894.881875] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1895.283774] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ce17b8-42d6-4d62-aa0f-34aac1834026 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.291176] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761858d7-1604-4e40-b876-9f1d04997358 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.320794] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2da4b16-cba4-4c1f-890a-f00bd1e79410 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.328387] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bd2b22-ec85-4885-9aaf-f88004287a64 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.341310] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1895.755920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "c6de797f-03f7-4dca-9c6a-e7b840990be6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.756573] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.845293] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1896.350923] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1896.351146] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.634s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.351427] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.554s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.354628] env[62875]: INFO nova.compute.claims [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1897.356817] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.711117] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d05820-a2b2-4e28-803a-0b0035bb0ae3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.718914] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30043735-508d-430d-85e2-630710bd3d50 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.747964] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a50e367-6fe2-4c88-a1f1-75dd06f6686a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.754936] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fbe029-6e08-4e27-8e70-11925de510f2 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.767539] env[62875]: DEBUG nova.compute.provider_tree [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.863675] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.863916] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1897.863979] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1898.274287] env[62875]: DEBUG nova.scheduler.client.report [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1898.367740] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1898.367995] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1898.368051] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1898.368241] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1898.368376] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Skipping network cache update for instance because it is Building. 
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1898.368498] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1898.368628] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1898.369063] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1898.776862] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.777445] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1898.780367] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.359s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.781747] env[62875]: INFO nova.compute.claims [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1899.286229] env[62875]: DEBUG nova.compute.utils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1899.289723] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1899.289898] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1899.338953] env[62875]: DEBUG nova.policy [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d9acb28555445e2b6ed6dd3fed31474', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1bdec99f07f548569db90849aec2edb6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1899.750027] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Successfully created port: 8b1988e9-0bbf-49e4-989b-e71d3cfcf452 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1899.793347] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1900.156991] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6558504-f936-474e-8ae5-215259400797 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.164370] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930ca8fa-6d9e-44c9-9bc3-61439a2e70ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.197753] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6c2953-aaa7-4607-bc47-04a823221a81 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.205706] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0446f7-266a-4d6f-9264-9278db23628b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.220737] env[62875]: DEBUG nova.compute.provider_tree [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.724700] env[62875]: DEBUG nova.scheduler.client.report [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1900.728741] env[62875]: DEBUG nova.compute.manager [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Received event network-changed-8b1988e9-0bbf-49e4-989b-e71d3cfcf452 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1900.728848] env[62875]: DEBUG nova.compute.manager [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Refreshing instance network info cache due to event network-changed-8b1988e9-0bbf-49e4-989b-e71d3cfcf452. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1900.729163] env[62875]: DEBUG oslo_concurrency.lockutils [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] Acquiring lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.729410] env[62875]: DEBUG oslo_concurrency.lockutils [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] Acquired lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.729587] env[62875]: DEBUG nova.network.neutron [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Refreshing network info cache for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1900.811894] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1900.843017] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1900.843017] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.843017] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1900.843221] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.843221] env[62875]: DEBUG 
nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1900.843221] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1900.843221] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1900.843221] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1900.843518] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1900.843823] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1900.844111] env[62875]: DEBUG nova.virt.hardware [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1900.845185] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b93fcf3-986e-4c51-8fb0-a4224b92d28b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.853507] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89752a39-0c9c-4a83-9905-035dda360b10 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.969393] env[62875]: ERROR nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. 
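The nova.virt.hardware records above trace a concrete algorithm: with no flavor or image limits set, the limits fall back to 65536 sockets/cores/threads, and the code enumerates every sockets*cores*threads factorization of the vCPU count, which for 1 vCPU yields exactly VirtCPUTopology(cores=1,sockets=1,threads=1). A rough reconstruction of that enumeration under those assumptions (simplified; not the hardware.py source):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every (sockets, cores, threads) triple whose product equals the
        # vCPU count and which fits inside the per-dimension maximums.
        return [(s, c, t)
                for s, c, t in product(range(1, vcpus + 1), repeat=3)
                if s * c * t == vcpus
                and s <= max_sockets and c <= max_cores and t <= max_threads]

    # Matches the log: "Build topologies for 1 vcpu(s) 1:1:1" ->
    # "Got 1 possible topologies" -> [VirtCPUTopology(cores=1,sockets=1,threads=1)]
    print(possible_topologies(1))  # [(1, 1, 1)]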
[ 1900.969393] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1900.969393] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1900.969393] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1900.969393] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1900.969393] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1900.969393] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1900.969393] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1900.969393] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1900.969393] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1900.969393] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1900.969393] env[62875]: ERROR nova.compute.manager raise self.value [ 1900.969393] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1900.969393] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1900.969393] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1900.969393] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1900.969887] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1900.969887] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1900.969887] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. 
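Both PortBindingFailed tracebacks in this section (ports 32b18cd3-4268-404f-92d1-574827f7d571 and 8b1988e9-0bbf-49e4-989b-e71d3cfcf452) end in the same two frames: _update_port() calls _ensure_no_port_binding_failure(port), which raises the exception whose message points operators at the Neutron logs. The check itself is small; the sketch below is consistent with the traceback, but the 'binding:vif_type' == 'binding_failed' test is an assumption based on Neutron's port-binding attributes rather than a verbatim copy of nova/network/neutron.py:

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron "
                "logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Assumed check: Neutron reports a failed binding by setting the
        # port's binding:vif_type to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    port = {'id': '8b1988e9-0bbf-49e4-989b-e71d3cfcf452',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message as the ERROR records above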
[ 1900.969887] env[62875]: ERROR nova.compute.manager [ 1900.969887] env[62875]: Traceback (most recent call last): [ 1900.969887] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1900.969887] env[62875]: listener.cb(fileno) [ 1900.969887] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1900.969887] env[62875]: result = function(*args, **kwargs) [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1900.969887] env[62875]: return func(*args, **kwargs) [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1900.969887] env[62875]: raise e [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1900.969887] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1900.969887] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1900.969887] env[62875]: with excutils.save_and_reraise_exception(): [ 1900.969887] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1900.969887] env[62875]: self.force_reraise() [ 1900.969887] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1900.969887] env[62875]: raise self.value [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1900.969887] env[62875]: updated_port = self._update_port( [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1900.969887] env[62875]: _ensure_no_port_binding_failure(port) [ 1900.969887] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1900.969887] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1900.970656] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. [ 1900.970656] env[62875]: Removing descriptor: 18 [ 1900.970656] env[62875]: ERROR nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. 
[ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Traceback (most recent call last): [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] yield resources [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self.driver.spawn(context, instance, image_meta, [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1900.970656] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] vm_ref = self.build_virtual_machine(instance, [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] vif_infos = vmwarevif.get_vif_info(self._session, [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] for vif in network_info: [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return self._sync_wrapper(fn, *args, **kwargs) [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self.wait() [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self[:] = self._gt.wait() [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return self._exit_event.wait() [ 1900.970978] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] result = hub.switch() [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return self.greenlet.switch() [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] result = function(*args, **kwargs) [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return func(*args, **kwargs) [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] raise e [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] nwinfo = self.network_api.allocate_for_instance( [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1900.971323] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] created_port_ids = self._update_ports_for_instance( [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] with excutils.save_and_reraise_exception(): [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self.force_reraise() [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] raise self.value [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] updated_port = self._update_port( [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] _ensure_no_port_binding_failure(port) [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1900.971721] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] raise exception.PortBindingFailed(port_id=port['id']) [ 1900.972044] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] nova.exception.PortBindingFailed: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. [ 1900.972044] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] [ 1900.972044] env[62875]: INFO nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Terminating instance [ 1901.232462] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.233084] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1901.238232] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.577s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.239779] env[62875]: INFO nova.compute.claims [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1901.256980] env[62875]: DEBUG nova.network.neutron [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1901.371354] env[62875]: DEBUG nova.network.neutron [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.474948] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Acquiring lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.739924] env[62875]: DEBUG nova.compute.utils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1901.741542] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1901.741737] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1901.787066] env[62875]: DEBUG nova.policy [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52db0a44319f46939b47247136267ceb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5062c761ea34842a2f6179ae76f3465', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1901.873812] env[62875]: DEBUG oslo_concurrency.lockutils [req-970521f9-f4c6-4e04-857b-47be36da28b3 req-cafcd836-59e8-420c-8f4f-08ae033c8499 service nova] Releasing lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.875084] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Acquired lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.875084] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: 
edf6724b-3a8c-4c19-926e-8f1b081ab50f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.174025] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Successfully created port: 1be88be5-5769-466a-87e9-5438fd2017fb {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1902.245830] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1902.402995] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1902.493638] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.661974] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2452ab59-b60f-4f99-8e59-e0d4c9784638 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.672773] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3965f8-f432-4f9d-9a8c-0442b3a5eb57 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.705732] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da22abe-1ca6-4286-ae0a-bf3e372536c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.713353] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cc9d8c-2ac1-4442-a5cc-60b5f34ccce1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.726408] env[62875]: DEBUG nova.compute.provider_tree [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1902.751993] env[62875]: DEBUG nova.compute.manager [req-a467383e-7974-44da-8332-defe5ffdf7d8 req-6ff6d1ae-65dd-4763-8a99-590309b20b6d service nova] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Received event network-vif-deleted-8b1988e9-0bbf-49e4-989b-e71d3cfcf452 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 
1902.996848] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Releasing lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.997325] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1902.997562] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1902.997878] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1c3b1a7-fcc8-40b2-be8a-4cc97bad4c9d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.006908] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b8f464-41ef-41de-bed7-e7107d5fa6a4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.030783] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance edf6724b-3a8c-4c19-926e-8f1b081ab50f could not be found. [ 1903.031012] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1903.031203] env[62875]: INFO nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1903.031448] env[62875]: DEBUG oslo.service.loopingcall [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.031654] env[62875]: DEBUG nova.compute.manager [-] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1903.031748] env[62875]: DEBUG nova.network.neutron [-] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1903.048217] env[62875]: DEBUG nova.network.neutron [-] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1903.229115] env[62875]: DEBUG nova.scheduler.client.report [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1903.258620] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1903.286252] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1903.287092] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1903.287092] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1903.287092] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1903.287092] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1903.287266] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1903.287362] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1903.287519] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1903.287682] env[62875]: DEBUG nova.virt.hardware [None 
req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1903.287840] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1903.290810] env[62875]: DEBUG nova.virt.hardware [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1903.290810] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78515c09-fbf5-4de0-b92b-daad9bede3c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.297552] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b29c223-92e9-4fcc-a98f-e80ba29af689 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.509097] env[62875]: ERROR nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. 
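Just before the port failure above, the nova.virt.hardware entries walk the m1.nano flavor through CPU topology selection: flavor and image impose no limits (0:0:0), the maxima default to 65536 sockets/cores/threads, and the only factorization of one vCPU is sockets=1, cores=1, threads=1. The toy enumerator below captures that idea; it is a sketch of the search, not Nova's actual _get_possible_cpu_topologies, which additionally orders candidates by preference.

```python
def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Enumerate (sockets, cores, threads) factorizations of a vCPU count.

    With no flavor/image limits the maxima default to 65536 apiece,
    and one vCPU admits exactly one topology: 1:1:1.
    """
    return [(s, c, t)
            for s in range(1, min(vcpus, max_sockets) + 1)
            for c in range(1, min(vcpus, max_cores) + 1)
            for t in range(1, min(vcpus, max_threads) + 1)
            if s * c * t == vcpus]


if __name__ == "__main__":
    print(possible_cpu_topologies(1))  # [(1, 1, 1)] -- as logged above
    print(possible_cpu_topologies(4))  # every s*c*t == 4 split
```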
[ 1903.509097] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1903.509097] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1903.509097] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1903.509097] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1903.509097] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1903.509097] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1903.509097] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1903.509097] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1903.509097] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1903.509097] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1903.509097] env[62875]: ERROR nova.compute.manager raise self.value [ 1903.509097] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1903.509097] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1903.509097] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1903.509097] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1903.509524] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1903.509524] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1903.509524] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. 
[ 1903.509524] env[62875]: ERROR nova.compute.manager [ 1903.509524] env[62875]: Traceback (most recent call last): [ 1903.509524] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1903.509524] env[62875]: listener.cb(fileno) [ 1903.509524] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1903.509524] env[62875]: result = function(*args, **kwargs) [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1903.509524] env[62875]: return func(*args, **kwargs) [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1903.509524] env[62875]: raise e [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1903.509524] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1903.509524] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1903.509524] env[62875]: with excutils.save_and_reraise_exception(): [ 1903.509524] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1903.509524] env[62875]: self.force_reraise() [ 1903.509524] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1903.509524] env[62875]: raise self.value [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1903.509524] env[62875]: updated_port = self._update_port( [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1903.509524] env[62875]: _ensure_no_port_binding_failure(port) [ 1903.509524] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1903.509524] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1903.510171] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. [ 1903.510171] env[62875]: Removing descriptor: 18 [ 1903.510171] env[62875]: ERROR nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. 
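The same PortBindingFailed surfaces three times in this burst: from the greenthread running _allocate_network_async, as a bare eventlet traceback, and again in the per-instance spawn traceback that follows. All of them funnel through two helpers: a check that converts Neutron's binding:vif_type='binding_failed' into an exception, and oslo's save_and_reraise_exception, whose __exit__/force_reraise frames pad each traceback. A minimal runnable sketch of that pattern (the exception class is a stand-in, not Nova's real one; requires oslo.utils):

```python
from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (assumed shape)."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding as binding:vif_type =
    # 'binding_failed'; raising here aborts the build instead of
    # spawning a VM with a dead VIF.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])


def update_port(port):
    # save_and_reraise_exception() is a context manager: when the body
    # raises, its __exit__ logs the error and re-raises the saved
    # exception via force_reraise() -- those are the extra frames
    # padding every traceback above.
    with excutils.save_and_reraise_exception():
        ensure_no_port_binding_failure(port)


if __name__ == "__main__":
    update_port({'id': '1be88be5-5769-466a-87e9-5438fd2017fb',
                 'binding:vif_type': 'binding_failed'})
```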
[ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Traceback (most recent call last): [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] yield resources [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self.driver.spawn(context, instance, image_meta, [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1903.510171] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] vm_ref = self.build_virtual_machine(instance, [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] vif_infos = vmwarevif.get_vif_info(self._session, [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] for vif in network_info: [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return self._sync_wrapper(fn, *args, **kwargs) [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self.wait() [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self[:] = self._gt.wait() [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return self._exit_event.wait() [ 1903.510458] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] result = hub.switch() [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return self.greenlet.switch() [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] result = function(*args, **kwargs) [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return func(*args, **kwargs) [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] raise e [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] nwinfo = self.network_api.allocate_for_instance( [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1903.510874] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] created_port_ids = self._update_ports_for_instance( [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] with excutils.save_and_reraise_exception(): [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self.force_reraise() [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] raise self.value [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] updated_port = self._update_port( [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] _ensure_no_port_binding_failure(port) [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1903.511713] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] raise exception.PortBindingFailed(port_id=port['id']) [ 1903.512061] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] nova.exception.PortBindingFailed: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. [ 1903.512061] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] [ 1903.512061] env[62875]: INFO nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Terminating instance [ 1903.550634] env[62875]: DEBUG nova.network.neutron [-] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.706632] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.738979] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.739263] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1903.742071] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.458s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.744023] env[62875]: INFO nova.compute.claims [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1904.016790] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.017070] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.017170] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1904.052974] env[62875]: INFO nova.compute.manager [-] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Took 1.02 seconds to deallocate network for instance. [ 1904.055446] env[62875]: DEBUG nova.compute.claims [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1904.055446] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.249561] env[62875]: DEBUG nova.compute.utils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1904.252884] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1904.253020] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1904.298254] env[62875]: DEBUG nova.policy [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71e5ba7a23624a27a339e014d8326d54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928c87a6f5a84f52b8a07a9f6c5f5daa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1904.537947] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1904.623932] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.659865] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Successfully created port: c11dd263-3314-4786-9f42-cda65561833a {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1904.701286] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1904.754406] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1904.793014] env[62875]: DEBUG nova.compute.manager [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Received event network-changed-1be88be5-5769-466a-87e9-5438fd2017fb {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1904.793245] env[62875]: DEBUG nova.compute.manager [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Refreshing instance network info cache due to event network-changed-1be88be5-5769-466a-87e9-5438fd2017fb. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1904.793445] env[62875]: DEBUG oslo_concurrency.lockutils [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] Acquiring lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.127052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.127345] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1905.127536] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1905.127848] env[62875]: DEBUG oslo_concurrency.lockutils [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] Acquired lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.128030] env[62875]: DEBUG nova.network.neutron [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Refreshing network info cache for port 1be88be5-5769-466a-87e9-5438fd2017fb {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1905.129108] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cf56e16-b91c-4c86-93f2-8aa1ff45303b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.133187] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3fac08-483e-4ef8-b335-3c7c17a189ff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.141204] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bb74cf-a9e0-43de-81cc-795eee80a750 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.147227] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d5337e-01e9-4d58-9f8e-e091d7acd8c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.191690] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e64d12-d77e-419e-828e-c789a8086fd5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.194564] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 820d7177-3e8f-4dd7-b7c3-c7abd4a62158 could not be found. [ 1905.194649] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1905.194815] env[62875]: INFO nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Took 0.07 seconds to destroy the instance on the hypervisor. 
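As with instance edf6724b earlier, the destroy path tolerates a VM that was never actually built: the vmops lookup raises InstanceNotFound, the WARNING above is logged, the instance is reported destroyed in a fraction of a second, and cleanup moves on to network deallocation. A rough sketch of that tolerate-missing pattern, with hypothetical backend/lookup/delete names standing in for the driver internals:

```python
import logging
import time

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(backend, instance_uuid):
    # `backend` is any object exposing lookup(uuid) and delete(ref);
    # both names are hypothetical, not the vmops API.
    start = time.monotonic()
    try:
        vm_ref = backend.lookup(instance_uuid)
        backend.delete(vm_ref)
    except InstanceNotFound as exc:
        # Spawn failed before the VM was ever built, so there is
        # nothing to tear down on the hypervisor: warn and fall
        # through to network/volume cleanup instead of failing the
        # delete.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.info("Took %.2f seconds to destroy the instance on the "
             "hypervisor.", time.monotonic() - start)
```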
[ 1905.195078] env[62875]: DEBUG oslo.service.loopingcall [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1905.195305] env[62875]: DEBUG nova.compute.manager [-] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1905.195401] env[62875]: DEBUG nova.network.neutron [-] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1905.202261] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66416ba1-d10a-4889-a1fe-17ea57de4e99 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.215410] env[62875]: DEBUG nova.compute.provider_tree [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1905.219673] env[62875]: DEBUG nova.network.neutron [-] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1905.264077] env[62875]: INFO nova.virt.block_device [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Booting with volume b252e1e9-ae51-4313-aa11-72f5789980a6 at /dev/sda [ 1905.316148] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5433b84-1d8b-4b3f-94cc-f354ae4fce19 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.325365] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d524dec8-914a-4f20-828a-154befdb3b79 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.348229] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81d61fd9-3531-405a-a53c-d913279e3fe2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.355687] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d57aa2c-86d8-433c-bc54-4fb46d01b144 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.381530] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7d94ef-70ef-4fbb-8294-9196eb1e3477 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.387611] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-9b23ef36-58b1-4d20-8095-4c85bce12fef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.400886] env[62875]: DEBUG nova.virt.block_device [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Updating existing volume attachment record: eb491d43-7ff4-40db-86ed-0897054690e9 {{(pid=62875) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1905.649680] env[62875]: DEBUG nova.network.neutron [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1905.705697] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.705939] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.706370] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1905.718368] env[62875]: DEBUG nova.scheduler.client.report [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1905.723550] env[62875]: DEBUG nova.network.neutron [-] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.737956] env[62875]: DEBUG nova.network.neutron [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.881930] env[62875]: ERROR nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. 
[ 1905.881930] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1905.881930] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1905.881930] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1905.881930] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1905.881930] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1905.881930] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1905.881930] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1905.881930] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1905.881930] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1905.881930] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.881930] env[62875]: ERROR nova.compute.manager raise self.value [ 1905.881930] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1905.881930] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1905.881930] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1905.881930] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1905.882335] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1905.882335] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1905.882335] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. 
[ 1905.882335] env[62875]: ERROR nova.compute.manager [ 1905.882335] env[62875]: Traceback (most recent call last): [ 1905.882335] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1905.882335] env[62875]: listener.cb(fileno) [ 1905.882335] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1905.882335] env[62875]: result = function(*args, **kwargs) [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1905.882335] env[62875]: return func(*args, **kwargs) [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1905.882335] env[62875]: raise e [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1905.882335] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1905.882335] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1905.882335] env[62875]: with excutils.save_and_reraise_exception(): [ 1905.882335] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1905.882335] env[62875]: self.force_reraise() [ 1905.882335] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.882335] env[62875]: raise self.value [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1905.882335] env[62875]: updated_port = self._update_port( [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1905.882335] env[62875]: _ensure_no_port_binding_failure(port) [ 1905.882335] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1905.882335] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1905.883015] env[62875]: nova.exception.PortBindingFailed: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. [ 1905.883015] env[62875]: Removing descriptor: 18 [ 1906.225053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.225583] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1906.228076] env[62875]: INFO nova.compute.manager [-] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Took 1.03 seconds to deallocate network for instance. 
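The compute_resources lock lines bracketing these failures ("acquired ... waited 29.349s", "released ... held 2.483s") are oslo.concurrency's decorator at work: claims, aborts, and usage updates for the whole host serialize on one named semaphore, which is why parallel tempest builds queue for tens of seconds before a claim even starts. A small sketch of the primitive; the lock name matches the log, while the guarded body is illustrative:

```python
import time

from oslo_concurrency import lockutils

# Nova wraps lockutils with a 'nova-' prefix; every function decorated
# with the same lock name serializes on one semaphore, and oslo emits
# the "acquired ... waited" / "released ... held" DEBUG pairs seen
# throughout this log.
synchronized = lockutils.synchronized_with_prefix('nova-')


@synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Illustrative body: test-and-update host resource usage with no
    # other claim or abort able to interleave.
    time.sleep(0.1)
    return instance_uuid


if __name__ == "__main__":
    print(instance_claim('74a1c3db-26b1-426e-be47-621c79ef9029'))
```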
[ 1906.228574] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.349s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.234976] env[62875]: DEBUG nova.compute.claims [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1906.234976] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.242783] env[62875]: DEBUG oslo_concurrency.lockutils [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] Releasing lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.242783] env[62875]: DEBUG nova.compute.manager [req-48db9c0e-d938-461e-bb3a-2bf9453df48c req-511877f3-95bc-4022-8caf-7e504566d26a service nova] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Received event network-vif-deleted-1be88be5-5769-466a-87e9-5438fd2017fb {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1906.706977] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1906.731038] env[62875]: DEBUG nova.compute.utils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1906.732035] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1906.732255] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1906.821085] env[62875]: DEBUG nova.policy [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd605899c552419cb3981a5317c8fd91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ba7275cd61243598328760bc6875d13', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1906.823974] env[62875]: DEBUG nova.compute.manager [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Received event network-changed-c11dd263-3314-4786-9f42-cda65561833a {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1906.823974] env[62875]: DEBUG nova.compute.manager [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Refreshing instance network info cache due to event network-changed-c11dd263-3314-4786-9f42-cda65561833a. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1906.824228] env[62875]: DEBUG oslo_concurrency.lockutils [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] Acquiring lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.824303] env[62875]: DEBUG oslo_concurrency.lockutils [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] Acquired lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.824462] env[62875]: DEBUG nova.network.neutron [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Refreshing network info cache for port c11dd263-3314-4786-9f42-cda65561833a {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1907.109407] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba76b9e3-9b16-4b10-9a13-9319e3b4b6dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.118519] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705ac085-4fe6-4909-abd3-fc54357596fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.150866] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a169f1b2-8d1a-49d3-99de-8741b6a410e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.158567] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c55949-69c1-4acd-843b-68488d5da4ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.172347] env[62875]: DEBUG nova.compute.provider_tree [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1907.236498] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1907.343669] env[62875]: DEBUG nova.network.neutron [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1907.356846] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Successfully created port: b8db97ed-983b-4899-9020-c7606d05012c {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1907.414665] env[62875]: DEBUG nova.network.neutron [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.504518] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1907.505081] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1907.505306] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1907.505485] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1907.505672] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1907.505852] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1907.505963] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 
tempest-ServersTestBootFromVolume-1104058832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1907.506193] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1907.506352] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1907.506517] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1907.506677] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1907.506852] env[62875]: DEBUG nova.virt.hardware [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1907.507744] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b3af8f-e2cf-44e3-add6-02d013bbb6db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.515998] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b7afce-594b-45a2-a5d0-b6d940b498c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.530424] env[62875]: ERROR nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. 
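The nova.virt.hardware DEBUG records a few lines above show the CPU topology negotiation for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only factorisation is sockets=1, cores=1, threads=1. An illustrative enumeration of the "possible topologies" step (not Nova's actual implementation), assuming the 65536 maxima logged above:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every (sockets, cores, threads) triple whose product is
        # exactly the vCPU count and which fits inside the maxima.
        result = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    result.append(VirtCPUTopology(sockets, cores, threads))
        return result

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- matching the
    # "Got 1 possible topologies" record above.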
[ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] Traceback (most recent call last): [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] yield resources [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self.driver.spawn(context, instance, image_meta, [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] vm_ref = self.build_virtual_machine(instance, [ 1907.530424] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] vif_infos = vmwarevif.get_vif_info(self._session, [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] for vif in network_info: [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] return self._sync_wrapper(fn, *args, **kwargs) [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self.wait() [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self[:] = self._gt.wait() [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] return self._exit_event.wait() [ 1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1907.530783] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] current.throw(*self._exc) [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] result = function(*args, **kwargs) [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] return func(*args, **kwargs) [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] raise e [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] nwinfo = self.network_api.allocate_for_instance( [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] created_port_ids = self._update_ports_for_instance( [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] with excutils.save_and_reraise_exception(): [ 1907.531145] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self.force_reraise() [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] raise self.value [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] updated_port = self._update_port( [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] _ensure_no_port_binding_failure(port) [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] raise exception.PortBindingFailed(port_id=port['id']) [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] nova.exception.PortBindingFailed: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. [ 1907.531487] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] [ 1907.531487] env[62875]: INFO nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Terminating instance [ 1907.676892] env[62875]: DEBUG nova.scheduler.client.report [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1907.918163] env[62875]: DEBUG oslo_concurrency.lockutils [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] Releasing lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.918163] env[62875]: DEBUG nova.compute.manager [req-c6886bf0-abde-4c01-bab2-693582a2efc2 req-14d2a40b-a88f-4764-a927-c69fb3f7533e service nova] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Received event network-vif-deleted-c11dd263-3314-4786-9f42-cda65561833a {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1908.036381] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Acquiring lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.036381] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Acquired lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.036381] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1908.184017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 
tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.953s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.184017] env[62875]: ERROR nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. [ 1908.184017] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] Traceback (most recent call last): [ 1908.184017] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1908.184017] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self.driver.spawn(context, instance, image_meta, [ 1908.184017] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1908.184017] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1908.184017] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1908.184017] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] vm_ref = self.build_virtual_machine(instance, [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] for vif in network_info: [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return self._sync_wrapper(fn, *args, **kwargs) [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self.wait() [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self[:] = self._gt.wait() [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: 
cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return self._exit_event.wait() [ 1908.184415] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] result = hub.switch() [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return self.greenlet.switch() [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] result = function(*args, **kwargs) [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] return func(*args, **kwargs) [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] raise e [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] nwinfo = self.network_api.allocate_for_instance( [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1908.184725] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] created_port_ids = self._update_ports_for_instance( [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] with excutils.save_and_reraise_exception(): [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] self.force_reraise() [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] raise self.value 
[ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] updated_port = self._update_port( [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] _ensure_no_port_binding_failure(port) [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1908.185055] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] raise exception.PortBindingFailed(port_id=port['id']) [ 1908.185340] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] nova.exception.PortBindingFailed: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. [ 1908.185340] env[62875]: ERROR nova.compute.manager [instance: cefcbb6a-378b-4927-b115-d648017502e9] [ 1908.185340] env[62875]: DEBUG nova.compute.utils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1908.185904] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Build of instance cefcbb6a-378b-4927-b115-d648017502e9 was re-scheduled: Binding failed for port f322eec3-8899-4969-a7e2-1ae5502072ec, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1908.186468] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1908.186787] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Acquiring lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.187180] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Acquired lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.187498] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1908.188813] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.385s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.245793] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1908.281553] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1908.281805] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1908.281961] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1908.282164] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1908.282311] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1908.282479] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1908.282668] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1908.282826] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1908.282993] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1908.283232] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1908.283356] env[62875]: DEBUG nova.virt.hardware [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1908.284251] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d96f97-e585-4681-9de1-37c980a5c46a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.292724] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d0321d-8f3c-4e15-8597-5a50a52ed849 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.508374] env[62875]: ERROR nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. 
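The "after 1 attempt(s)" wording in the record above comes from the asynchronous network allocation path, which wraps allocate_for_instance() in a bounded retry loop; here the budget allowed a single attempt, so the failure surfaces immediately. A hedged sketch of that shape, with illustrative names and delay policy rather than Nova's exact configuration:

    import time

    def allocate_with_retries(allocate_fn, attempts=1, delay=1.0):
        # allocate_fn stands in for network_api.allocate_for_instance();
        # attempts/delay are assumptions, not Nova's actual config options.
        for attempt in range(1, attempts + 1):
            try:
                return allocate_fn()
            except Exception:
                if attempt == attempts:
                    raise  # budget exhausted: re-raise, as logged above
                time.sleep(delay)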
[ 1908.508374] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1908.508374] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1908.508374] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1908.508374] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1908.508374] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1908.508374] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1908.508374] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1908.508374] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1908.508374] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1908.508374] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1908.508374] env[62875]: ERROR nova.compute.manager raise self.value [ 1908.508374] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1908.508374] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1908.508374] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1908.508374] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1908.508829] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1908.508829] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1908.508829] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. 
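Several frames in the traceback above pass through oslo_utils.excutils.save_and_reraise_exception(), whose __exit__() / force_reraise() lines recur in every one of these dumps. The context manager exists to run cleanup while an exception is in flight and then re-raise the original, which is why "raise self.value" appears as a frame. A minimal sketch using the real oslo.utils API (the helper names are hypothetical stand-ins for the _update_ports_for_instance() flow):

    from oslo_utils import excutils

    def update_port_or_rollback(update_port, rollback, port_id):
        try:
            return update_port(port_id)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; on exit the saved exception is
                # re-raised unchanged (force_reraise -> raise self.value).
                rollback(port_id)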
[ 1908.508829] env[62875]: ERROR nova.compute.manager [ 1908.508829] env[62875]: Traceback (most recent call last): [ 1908.508829] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1908.508829] env[62875]: listener.cb(fileno) [ 1908.508829] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1908.508829] env[62875]: result = function(*args, **kwargs) [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1908.508829] env[62875]: return func(*args, **kwargs) [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1908.508829] env[62875]: raise e [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1908.508829] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1908.508829] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1908.508829] env[62875]: with excutils.save_and_reraise_exception(): [ 1908.508829] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1908.508829] env[62875]: self.force_reraise() [ 1908.508829] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1908.508829] env[62875]: raise self.value [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1908.508829] env[62875]: updated_port = self._update_port( [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1908.508829] env[62875]: _ensure_no_port_binding_failure(port) [ 1908.508829] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1908.508829] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1908.509509] env[62875]: nova.exception.PortBindingFailed: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. [ 1908.509509] env[62875]: Removing descriptor: 18 [ 1908.509509] env[62875]: ERROR nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. 
[ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Traceback (most recent call last): [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] yield resources [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self.driver.spawn(context, instance, image_meta, [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1908.509509] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] vm_ref = self.build_virtual_machine(instance, [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] vif_infos = vmwarevif.get_vif_info(self._session, [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] for vif in network_info: [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return self._sync_wrapper(fn, *args, **kwargs) [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self.wait() [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self[:] = self._gt.wait() [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return self._exit_event.wait() [ 1908.509801] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] result = hub.switch() [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return self.greenlet.switch() [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] result = function(*args, **kwargs) [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return func(*args, **kwargs) [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] raise e [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] nwinfo = self.network_api.allocate_for_instance( [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1908.510120] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] created_port_ids = self._update_ports_for_instance( [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] with excutils.save_and_reraise_exception(): [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self.force_reraise() [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] raise self.value [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] updated_port = self._update_port( [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] _ensure_no_port_binding_failure(port) [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1908.510425] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] raise exception.PortBindingFailed(port_id=port['id']) [ 1908.510705] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] nova.exception.PortBindingFailed: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. [ 1908.510705] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] [ 1908.510705] env[62875]: INFO nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Terminating instance [ 1908.557284] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1908.650225] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.725232] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1908.793540] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.867597] env[62875]: DEBUG nova.compute.manager [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Received event network-changed-b8db97ed-983b-4899-9020-c7606d05012c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1908.867808] env[62875]: DEBUG nova.compute.manager [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Refreshing instance network info cache due to event network-changed-b8db97ed-983b-4899-9020-c7606d05012c. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1908.868084] env[62875]: DEBUG oslo_concurrency.lockutils [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] Acquiring lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.868194] env[62875]: DEBUG oslo_concurrency.lockutils [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] Acquired lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.868466] env[62875]: DEBUG nova.network.neutron [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Refreshing network info cache for port b8db97ed-983b-4899-9020-c7606d05012c {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1909.018102] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Acquiring lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.030156] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c704588-20f7-45e0-b43b-666b1092470a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.037910] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cfa317-f88d-4281-a911-60329179c244 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.067758] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303fe2f9-7a9a-44ab-83ea-d709a547ba4c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.075028] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e7985d-54ea-4c4d-972b-a01b37b5029c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.088595] env[62875]: DEBUG nova.compute.provider_tree [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1909.152859] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Releasing lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.153471] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 
tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1909.153809] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a4d4961-eacf-418e-a181-e2c3ee31a97c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.162792] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e890e89-0e0c-4b42-bc76-a5a8624015ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.186150] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 2896c309-a702-498b-8b44-f01620b597e6 could not be found. [ 1909.186415] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1909.186696] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e20acbd-ddfa-4a84-a8c6-5a4a990ccca9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.194569] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58540f95-0f37-498d-bce8-3f296600b40c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.215948] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2896c309-a702-498b-8b44-f01620b597e6 could not be found. [ 1909.216222] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1909.216479] env[62875]: INFO nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1909.216665] env[62875]: DEBUG oslo.service.loopingcall [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1909.216900] env[62875]: DEBUG nova.compute.manager [-] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1909.216995] env[62875]: DEBUG nova.network.neutron [-] [instance: 2896c309-a702-498b-8b44-f01620b597e6] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1909.246636] env[62875]: DEBUG nova.network.neutron [-] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1909.297633] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Releasing lock "refresh_cache-cefcbb6a-378b-4927-b115-d648017502e9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.297924] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1909.298132] env[62875]: DEBUG nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1909.298356] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1909.313704] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1909.398776] env[62875]: DEBUG nova.network.neutron [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1909.453431] env[62875]: DEBUG nova.network.neutron [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.591553] env[62875]: DEBUG nova.scheduler.client.report [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1909.705894] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1909.706907] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1909.748764] env[62875]: DEBUG nova.network.neutron [-] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.816020] env[62875]: DEBUG nova.network.neutron [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.955977] env[62875]: DEBUG oslo_concurrency.lockutils [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] Releasing lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.956319] env[62875]: DEBUG nova.compute.manager [req-f41ab098-627d-4ea0-af6d-162940758a20 req-64dd76a1-c32e-41b7-bf5e-3fbd1e304a47 service nova] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Received event network-vif-deleted-b8db97ed-983b-4899-9020-c7606d05012c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1909.956702] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Acquired lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.956890] env[62875]: DEBUG 
nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1910.096596] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.908s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.097255] env[62875]: ERROR nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Traceback (most recent call last): [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self.driver.spawn(context, instance, image_meta, [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] vm_ref = self.build_virtual_machine(instance, [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] vif_infos = vmwarevif.get_vif_info(self._session, [ 1910.097255] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] for vif in network_info: [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return self._sync_wrapper(fn, *args, **kwargs) [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1910.097589] env[62875]: ERROR 
nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self.wait() [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self[:] = self._gt.wait() [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return self._exit_event.wait() [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] result = hub.switch() [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1910.097589] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return self.greenlet.switch() [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] result = function(*args, **kwargs) [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] return func(*args, **kwargs) [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] raise e [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] nwinfo = self.network_api.allocate_for_instance( [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] created_port_ids = self._update_ports_for_instance( [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] with excutils.save_and_reraise_exception(): [ 1910.098010] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] self.force_reraise() [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] raise self.value [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] updated_port = self._update_port( [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] _ensure_no_port_binding_failure(port) [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] raise exception.PortBindingFailed(port_id=port['id']) [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] nova.exception.PortBindingFailed: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. [ 1910.098384] env[62875]: ERROR nova.compute.manager [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] [ 1910.098695] env[62875]: DEBUG nova.compute.utils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1910.099283] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.030s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.102569] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Build of instance 9abafa57-9674-45f7-90cd-f80a8c80b567 was re-scheduled: Binding failed for port c2c906c3-5e7b-42b6-9f48-6fa705fb4733, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1910.103030] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1910.103253] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.103404] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquired lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.103564] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1910.209282] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.252027] env[62875]: INFO nova.compute.manager [-] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Took 1.03 seconds to deallocate network for instance. [ 1910.319078] env[62875]: INFO nova.compute.manager [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] [instance: cefcbb6a-378b-4927-b115-d648017502e9] Took 1.02 seconds to deallocate network for instance. [ 1910.473541] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1910.535480] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.627042] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1910.702246] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.811119] env[62875]: INFO nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Took 0.56 seconds to detach 1 volumes for instance. [ 1910.815485] env[62875]: DEBUG nova.compute.claims [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1910.815669] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.922669] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0120218b-fa70-4109-ab06-8bb3de1d2b5b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.931232] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa52e25-53fc-4c7c-b08d-f0d82c8090d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.961782] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509efc81-2191-4699-964a-1edade4af437 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.969058] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0a87d9-6673-4190-a0bc-21a524d918dc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.986079] env[62875]: DEBUG nova.compute.provider_tree [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1911.038321] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Releasing lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.038758] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 
tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1911.039196] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1911.039317] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f01226f0-a277-4ac0-baac-27c6f745ac60 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.047469] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bae86ad-17b8-4af4-a4c2-e39ef8e79f49 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.070776] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 74a1c3db-26b1-426e-be47-621c79ef9029 could not be found. [ 1911.071907] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1911.071907] env[62875]: INFO nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1911.071907] env[62875]: DEBUG oslo.service.loopingcall [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.071907] env[62875]: DEBUG nova.compute.manager [-] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1911.071907] env[62875]: DEBUG nova.network.neutron [-] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1911.090834] env[62875]: DEBUG nova.network.neutron [-] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1911.205883] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Releasing lock "refresh_cache-9abafa57-9674-45f7-90cd-f80a8c80b567" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.205883] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1911.206104] env[62875]: DEBUG nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1911.206159] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1911.230334] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1911.352660] env[62875]: INFO nova.scheduler.client.report [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Deleted allocations for instance cefcbb6a-378b-4927-b115-d648017502e9 [ 1911.490025] env[62875]: DEBUG nova.scheduler.client.report [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1911.594534] env[62875]: DEBUG nova.network.neutron [-] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.734068] env[62875]: DEBUG nova.network.neutron [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.865032] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6b299535-e7d0-4816-8dbf-0c1caa7d6ff3 tempest-ImagesOneServerTestJSON-720048063 tempest-ImagesOneServerTestJSON-720048063-project-member] Lock "cefcbb6a-378b-4927-b115-d648017502e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.430s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.993874] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.894s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.994497] env[62875]: ERROR nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. 
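
Every PortBindingFailed in this trace is raised by the same guard: after updating a port, Nova inspects the binding that Neutron reports and bails out if the vif type came back as binding_failed (the full traceback for this occurrence follows below, bottoming out at nova/network/neutron.py:294). A minimal sketch of that guard, reconstructed from the traceback, with the vif-type constant and the exception class inlined so the snippet is self-contained; `port` is assumed to be the port dict returned by the Neutron API:

    # Sketch of the guard at nova/network/neutron.py:294 in this build.
    # The constant and exception stand in for nova.network.model and
    # nova.exception; `port` is the Neutron API port dict (assumption).
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port):
        # Neutron sets binding:vif_type to 'binding_failed' when no
        # mechanism driver could bind the port on the target host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])
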
[ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Traceback (most recent call last): [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self.driver.spawn(context, instance, image_meta, [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] vm_ref = self.build_virtual_machine(instance, [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] vif_infos = vmwarevif.get_vif_info(self._session, [ 1911.994497] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] for vif in network_info: [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] return self._sync_wrapper(fn, *args, **kwargs) [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self.wait() [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self[:] = self._gt.wait() [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] return self._exit_event.wait() [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] current.throw(*self._exc) [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1911.994856] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] result = function(*args, **kwargs) [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] return func(*args, **kwargs) [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] raise e [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] nwinfo = self.network_api.allocate_for_instance( [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] created_port_ids = self._update_ports_for_instance( [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] with excutils.save_and_reraise_exception(): [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] self.force_reraise() [ 1911.995240] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] raise self.value [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] updated_port = self._update_port( [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] _ensure_no_port_binding_failure(port) [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] raise exception.PortBindingFailed(port_id=port['id']) [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] 
nova.exception.PortBindingFailed: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. [ 1911.995660] env[62875]: ERROR nova.compute.manager [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] [ 1911.995660] env[62875]: DEBUG nova.compute.utils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1911.996417] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.673s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.999336] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Build of instance 79d018cc-2400-4925-a09f-e0aaaa8b22db was re-scheduled: Binding failed for port 59ae6b07-482d-4aa5-a5f5-77d100c34f3f, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1911.999760] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1911.999983] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Acquiring lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.000372] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Acquired lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.000505] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.098474] env[62875]: INFO nova.compute.manager [-] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Took 1.03 seconds to deallocate network for instance. 
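
The recurring lockutils lines ("Acquiring lock ...", "Lock ... acquired by ... :: waited N.NNNs", "... released ... :: held N.NNNs", from lockutils.py:402/407/421) come from oslo.concurrency's synchronized decorator, which serializes callers on a named in-process semaphore and times both the wait and the hold. A minimal sketch of the pattern; the lock name matches the log, but the decorated function body is illustrative:

    # Sketch of the oslo.concurrency pattern behind the lockutils DEBUG
    # lines; the library's inner() wrapper logs the acquire, the wait
    # time, and the hold time. The function body is illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Runs with the 'compute_resources' semaphore held; concurrent
        # claims and aborts block here, which is where the 26-28s
        # "waited" values earlier in this log accumulate.
        print(f"aborting claim for {instance_uuid}")
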
[ 1912.100699] env[62875]: DEBUG nova.compute.claims [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1912.100876] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.237545] env[62875]: INFO nova.compute.manager [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: 9abafa57-9674-45f7-90cd-f80a8c80b567] Took 1.03 seconds to deallocate network for instance. [ 1912.367187] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1912.534883] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1912.694853] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.885457] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.913835] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c588945-9a4d-4346-84d1-4d9e92b6ba81 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.921403] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fbefdd-f52c-478f-b82d-1079b682883e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.953313] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e58ef4f-f2c8-4fe1-8da8-dad31453144b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.961607] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2336fd8d-5d13-4775-8bf8-ad558c1646f1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.976533] env[62875]: DEBUG nova.compute.provider_tree [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.197936] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Releasing lock "refresh_cache-79d018cc-2400-4925-a09f-e0aaaa8b22db" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.198302] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1913.198501] env[62875]: DEBUG nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1913.198671] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1913.217716] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1913.267110] env[62875]: INFO nova.scheduler.client.report [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Deleted allocations for instance 9abafa57-9674-45f7-90cd-f80a8c80b567 [ 1913.482330] env[62875]: DEBUG nova.scheduler.client.report [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1913.721034] env[62875]: DEBUG nova.network.neutron [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.775627] env[62875]: DEBUG oslo_concurrency.lockutils [None req-29044a4d-9440-41f1-bf7b-0f77619d20ef tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "9abafa57-9674-45f7-90cd-f80a8c80b567" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.464s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.987536] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.991s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.988212] env[62875]: ERROR nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. 
[ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Traceback (most recent call last): [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self.driver.spawn(context, instance, image_meta, [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] vm_ref = self.build_virtual_machine(instance, [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] vif_infos = vmwarevif.get_vif_info(self._session, [ 1913.988212] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] for vif in network_info: [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return self._sync_wrapper(fn, *args, **kwargs) [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self.wait() [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self[:] = self._gt.wait() [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return self._exit_event.wait() [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] result = hub.switch() [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1913.988670] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return self.greenlet.switch() [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] result = function(*args, **kwargs) [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] return func(*args, **kwargs) [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] raise e [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] nwinfo = self.network_api.allocate_for_instance( [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] created_port_ids = self._update_ports_for_instance( [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] with excutils.save_and_reraise_exception(): [ 1913.988965] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] self.force_reraise() [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] raise self.value [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] updated_port = self._update_port( [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] _ensure_no_port_binding_failure(port) [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] raise exception.PortBindingFailed(port_id=port['id']) [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] nova.exception.PortBindingFailed: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. [ 1913.989285] env[62875]: ERROR nova.compute.manager [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] [ 1913.989555] env[62875]: DEBUG nova.compute.utils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1913.990142] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.004s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.991586] env[62875]: INFO nova.compute.claims [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1913.994109] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Build of instance 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f was re-scheduled: Binding failed for port 60bd0332-d45a-4167-a467-15d844758709, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1913.995224] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1913.995224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Acquiring lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.995224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Acquired lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.995224] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.223292] env[62875]: INFO nova.compute.manager [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] [instance: 79d018cc-2400-4925-a09f-e0aaaa8b22db] Took 1.02 seconds to deallocate network for instance. [ 1914.277847] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1914.514797] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1914.580693] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.796125] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.955374] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "acc78084-21e8-456c-a573-fc5e931147c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.955634] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "acc78084-21e8-456c-a573-fc5e931147c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.083008] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Releasing lock "refresh_cache-3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.083679] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1915.083679] env[62875]: DEBUG nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1915.083679] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1915.112876] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1915.248809] env[62875]: INFO nova.scheduler.client.report [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Deleted allocations for instance 79d018cc-2400-4925-a09f-e0aaaa8b22db [ 1915.322707] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb814fe-4d76-4864-a36d-c718f5c0b727 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.330632] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7c867b-8ae3-487a-9fcd-b8e8cd6e3a11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.360104] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec60624e-8156-43fe-ab59-ec6e1da2a150 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.367353] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1e1c38-0149-4dcb-bc7e-2a9a489e4026 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.381578] env[62875]: DEBUG nova.compute.provider_tree [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1915.615661] env[62875]: DEBUG nova.network.neutron [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.759491] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2531ae06-c935-4505-a708-623308300555 tempest-VolumesAssistedSnapshotsTest-1571818515 
tempest-VolumesAssistedSnapshotsTest-1571818515-project-member] Lock "79d018cc-2400-4925-a09f-e0aaaa8b22db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.695s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.884233] env[62875]: DEBUG nova.scheduler.client.report [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1916.118957] env[62875]: INFO nova.compute.manager [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] [instance: 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f] Took 1.04 seconds to deallocate network for instance. [ 1916.262317] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1916.390393] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.391251] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1916.395120] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.498s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.782861] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.902752] env[62875]: DEBUG nova.compute.utils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1916.905101] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1916.905101] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1916.970472] env[62875]: DEBUG nova.policy [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc126cd4cdcb40f0b365a281c914f1f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec7fa439f1934882bbe528104ae4e3e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1917.154412] env[62875]: INFO nova.scheduler.client.report [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Deleted allocations for instance 3eea38b9-fe91-43b0-ba7f-6380a6a94c5f [ 1917.276460] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821b22d9-fc46-4070-964a-4c21800237c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.285023] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92681a4b-060e-4137-b2d1-54704b72d669 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.322190] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fa9cc2-7a3c-4457-8d63-a234b4a0ecea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.330324] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf4ba86-3a31-4f51-97c5-22ddf4ea236c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.343724] env[62875]: DEBUG nova.compute.provider_tree [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.372800] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Successfully created port: 4d33003e-9d69-4051-87a2-9bc19620ca97 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1917.407856] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1917.665646] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4af396b-2c25-44cd-a5ee-20b596778772 tempest-ServerExternalEventsTest-1452446569 tempest-ServerExternalEventsTest-1452446569-project-member] Lock "3eea38b9-fe91-43b0-ba7f-6380a6a94c5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.912s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.850109] env[62875]: DEBUG nova.scheduler.client.report [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1918.173146] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1918.356019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.958s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.356019] env[62875]: ERROR nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. [ 1918.356019] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Traceback (most recent call last): [ 1918.356019] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1918.356019] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self.driver.spawn(context, instance, image_meta, [ 1918.356019] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1918.356019] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1918.356019] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1918.356019] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] vm_ref = self.build_virtual_machine(instance, [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] vif_infos = vmwarevif.get_vif_info(self._session, [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] for vif in network_info: [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return self._sync_wrapper(fn, *args, **kwargs) [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self.wait() [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File 
"/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self[:] = self._gt.wait() [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return self._exit_event.wait() [ 1918.356340] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] result = hub.switch() [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return self.greenlet.switch() [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] result = function(*args, **kwargs) [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] return func(*args, **kwargs) [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] raise e [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] nwinfo = self.network_api.allocate_for_instance( [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1918.356636] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] created_port_ids = self._update_ports_for_instance( [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] with excutils.save_and_reraise_exception(): [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] self.force_reraise() [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 
4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] raise self.value [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] updated_port = self._update_port( [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] _ensure_no_port_binding_failure(port) [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1918.356930] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] raise exception.PortBindingFailed(port_id=port['id']) [ 1918.357218] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] nova.exception.PortBindingFailed: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. [ 1918.357218] env[62875]: ERROR nova.compute.manager [instance: 4f036275-fd33-440d-acba-6e475cda62c2] [ 1918.357218] env[62875]: DEBUG nova.compute.utils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1918.359833] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Build of instance 4f036275-fd33-440d-acba-6e475cda62c2 was re-scheduled: Binding failed for port 32b18cd3-4268-404f-92d1-574827f7d571, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1918.359833] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1918.359833] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Acquiring lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.359833] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Acquired lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.360071] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1918.360255] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.305s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.422000] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1918.454576] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1918.454823] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1918.454981] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1918.458364] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1918.458364] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1918.458364] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1918.458364] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1918.458364] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1918.458781] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1918.458781] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1918.458781] env[62875]: DEBUG nova.virt.hardware [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1918.458781] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a25cc4a-7090-471b-bb37-f0f6c39e865b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.462904] env[62875]: DEBUG nova.compute.manager [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Received event network-changed-4d33003e-9d69-4051-87a2-9bc19620ca97 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1918.463096] env[62875]: DEBUG nova.compute.manager [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Refreshing instance network info cache due to event network-changed-4d33003e-9d69-4051-87a2-9bc19620ca97. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1918.463319] env[62875]: DEBUG oslo_concurrency.lockutils [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] Acquiring lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.463464] env[62875]: DEBUG oslo_concurrency.lockutils [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] Acquired lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.463690] env[62875]: DEBUG nova.network.neutron [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Refreshing network info cache for port 4d33003e-9d69-4051-87a2-9bc19620ca97 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1918.472163] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab1fc4a-24ab-403e-99b9-e8e950a0e78d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.697466] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.746591] env[62875]: ERROR nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information. 
[ 1918.746591] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1918.746591] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1918.746591] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1918.746591] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1918.746591] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1918.746591] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1918.746591] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1918.746591] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.746591] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1918.746591] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.746591] env[62875]: ERROR nova.compute.manager raise self.value [ 1918.746591] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1918.746591] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1918.746591] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1918.746591] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1918.747117] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1918.747117] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1918.747117] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information. 
[ 1918.747117] env[62875]: ERROR nova.compute.manager [ 1918.747117] env[62875]: Traceback (most recent call last): [ 1918.747117] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1918.747117] env[62875]: listener.cb(fileno) [ 1918.747117] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1918.747117] env[62875]: result = function(*args, **kwargs) [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1918.747117] env[62875]: return func(*args, **kwargs) [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1918.747117] env[62875]: raise e [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1918.747117] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1918.747117] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1918.747117] env[62875]: with excutils.save_and_reraise_exception(): [ 1918.747117] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.747117] env[62875]: self.force_reraise() [ 1918.747117] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.747117] env[62875]: raise self.value [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1918.747117] env[62875]: updated_port = self._update_port( [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1918.747117] env[62875]: _ensure_no_port_binding_failure(port) [ 1918.747117] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1918.747117] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1918.747782] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information. [ 1918.747782] env[62875]: Removing descriptor: 18 [ 1918.748243] env[62875]: ERROR nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information. 
[ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Traceback (most recent call last): [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] yield resources [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self.driver.spawn(context, instance, image_meta, [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] vm_ref = self.build_virtual_machine(instance, [ 1918.748243] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] vif_infos = vmwarevif.get_vif_info(self._session, [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] for vif in network_info: [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return self._sync_wrapper(fn, *args, **kwargs) [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self.wait() [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self[:] = self._gt.wait() [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return self._exit_event.wait() [ 1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1918.748830] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] result = hub.switch() [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return self.greenlet.switch() [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] result = function(*args, **kwargs) [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return func(*args, **kwargs) [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] raise e [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] nwinfo = self.network_api.allocate_for_instance( [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] created_port_ids = self._update_ports_for_instance( [ 1918.749713] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] with excutils.save_and_reraise_exception(): [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self.force_reraise() [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] raise self.value [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] updated_port = self._update_port( [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] _ensure_no_port_binding_failure(port) [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] raise exception.PortBindingFailed(port_id=port['id']) [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] nova.exception.PortBindingFailed: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information. [ 1918.750472] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] [ 1918.751342] env[62875]: INFO nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Terminating instance [ 1918.901091] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1918.992229] env[62875]: DEBUG nova.network.neutron [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1919.030771] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.184220] env[62875]: DEBUG nova.network.neutron [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.224464] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f4b557-4010-43bb-a8c5-607f8b3df1b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.232585] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4333354-65a8-4518-b513-a4f6f310d9c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.264858] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Acquiring lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.265549] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4519a8-d7e5-41f1-88fd-15d7cc2ba42a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.273945] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc890c1-e57a-46f9-8625-ba30287c5044 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.291514] env[62875]: DEBUG nova.compute.provider_tree [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.534288] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Releasing lock "refresh_cache-4f036275-fd33-440d-acba-6e475cda62c2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.534529] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1919.534712] env[62875]: DEBUG nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1919.534876] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1919.553734] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1919.686034] env[62875]: DEBUG oslo_concurrency.lockutils [req-20c013c5-49f6-4b48-95aa-ac06647e5daa req-9966ef7d-a7dd-4711-9bed-8ad6ae6ff1f7 service nova] Releasing lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.686479] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Acquired lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.686674] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1919.795866] env[62875]: DEBUG nova.scheduler.client.report [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1920.054846] env[62875]: DEBUG nova.network.neutron [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1920.212369] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1920.305068] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.945s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.305762] env[62875]: ERROR nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Traceback (most recent call last): [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self.driver.spawn(context, instance, image_meta, [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] vm_ref = self.build_virtual_machine(instance, [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] vif_infos = vmwarevif.get_vif_info(self._session, [ 1920.305762] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] for vif in network_info: [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return self._sync_wrapper(fn, *args, **kwargs) [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/model.py", line 605, in 
_sync_wrapper [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self.wait() [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self[:] = self._gt.wait() [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return self._exit_event.wait() [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] result = hub.switch() [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1920.306375] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return self.greenlet.switch() [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] result = function(*args, **kwargs) [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] return func(*args, **kwargs) [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] raise e [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] nwinfo = self.network_api.allocate_for_instance( [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] created_port_ids = self._update_ports_for_instance( [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] with excutils.save_and_reraise_exception(): [ 1920.306914] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] self.force_reraise() [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] raise self.value [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] updated_port = self._update_port( [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] _ensure_no_port_binding_failure(port) [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] raise exception.PortBindingFailed(port_id=port['id']) [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] nova.exception.PortBindingFailed: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. [ 1920.307934] env[62875]: ERROR nova.compute.manager [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] [ 1920.309869] env[62875]: DEBUG nova.compute.utils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1920.309869] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.073s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.312116] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Build of instance edf6724b-3a8c-4c19-926e-8f1b081ab50f was re-scheduled: Binding failed for port 8b1988e9-0bbf-49e4-989b-e71d3cfcf452, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1920.312737] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1920.312973] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Acquiring lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.313136] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Acquired lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.313294] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1920.465319] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.494296] env[62875]: DEBUG nova.compute.manager [req-fd575ac1-0e3c-4f0b-b72b-74eadeac841e req-71fabc28-58fb-474f-8916-cde56d57d99a service nova] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Received event network-vif-deleted-4d33003e-9d69-4051-87a2-9bc19620ca97 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1920.559965] env[62875]: INFO nova.compute.manager [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] [instance: 4f036275-fd33-440d-acba-6e475cda62c2] Took 1.02 seconds to deallocate network for instance. [ 1920.833379] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1920.927682] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.972039] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Releasing lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.972642] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1920.972881] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1920.973632] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c3a3972-5eff-4c1d-9c01-dcd1ab39d8ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.984271] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955a22a8-7593-4e17-811c-56bf05861d46 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.011690] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ec4405a5-de44-4749-9225-3945db05ca6e could not be found. [ 1921.011931] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1921.012218] env[62875]: INFO nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Took 0.04 seconds to destroy the instance on the hypervisor. 
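Every PortBindingFailed traceback in this section bottoms out in the same two frames: _update_port (nova/network/neutron.py:585) calls _ensure_no_port_binding_failure (line 294), which raises the exception whose message then reappears verbatim in the ERROR summaries. A minimal sketch of that guard, assuming (as the Neutron port-binding extension defines) that a failed binding is surfaced through the port's binding:vif_type field:

    # Sketch of the guard at nova/network/neutron.py:294, per the frames above.
    # Neutron sets binding:vif_type to 'binding_failed' when no mechanism
    # driver could bind the port on the target host.
    from nova import exception

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

Because allocation runs in a spawned greenthread (_allocate_network_async), the exception only surfaces once the driver iterates network_info and the async wrapper calls wait() on the greenthread; that is why the eventlet frames sit between the vmwareapi spawn frames and the Neutron frames in every traceback here.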
[ 1921.012490] env[62875]: DEBUG oslo.service.loopingcall [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1921.012724] env[62875]: DEBUG nova.compute.manager [-] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1921.012823] env[62875]: DEBUG nova.network.neutron [-] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1921.032580] env[62875]: DEBUG nova.network.neutron [-] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1921.222390] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856b94be-146f-459f-818b-b48108836166 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.230611] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb8172c-6182-45d9-9bee-29b9bc522165 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.263609] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c58b6c-d7cf-499f-bb69-f76dc8aae1c6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.271690] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29457930-9b67-4d62-bfd2-f0c95ef28246 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.285036] env[62875]: DEBUG nova.compute.provider_tree [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1921.430233] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Releasing lock "refresh_cache-edf6724b-3a8c-4c19-926e-8f1b081ab50f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.430476] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 

{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1921.430663] env[62875]: DEBUG nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1921.430831] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1921.458861] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1921.535200] env[62875]: DEBUG nova.network.neutron [-] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.601198] env[62875]: INFO nova.scheduler.client.report [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Deleted allocations for instance 4f036275-fd33-440d-acba-6e475cda62c2 [ 1921.789018] env[62875]: DEBUG nova.scheduler.client.report [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1921.962141] env[62875]: DEBUG nova.network.neutron [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.037952] env[62875]: INFO nova.compute.manager [-] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Took 1.02 seconds to deallocate network for instance. 
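The inventory payload logged at 1921.789018 repeats verbatim throughout this section, so it is worth decoding once: Placement treats usable capacity per resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A short sketch of that arithmetic, with the dict values copied from the log entry:

    # Capacity implied by the provider inventory logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 175},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity {capacity:g}, max {inv['max_unit']} per allocation")
    # VCPU: capacity 192, max 16 per allocation
    # MEMORY_MB: capacity 196078, max 65530 per allocation
    # DISK_GB: capacity 400, max 175 per allocation

So the rescheduling churn here is not a capacity problem: the resource view at 1922.375279 below still reports free_vcpus=48 and free_ram=181275MB; every failure is the Neutron port binding.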
[ 1922.041464] env[62875]: DEBUG nova.compute.claims [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1922.041649] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.110419] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6259c2dc-3e00-4fe9-a954-d5b9f4b02adc tempest-ServersWithSpecificFlavorTestJSON-1093263874 tempest-ServersWithSpecificFlavorTestJSON-1093263874-project-member] Lock "4f036275-fd33-440d-acba-6e475cda62c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 78.162s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.293898] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.986s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.294698] env[62875]: ERROR nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. 
[ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Traceback (most recent call last): [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self.driver.spawn(context, instance, image_meta, [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] vm_ref = self.build_virtual_machine(instance, [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] vif_infos = vmwarevif.get_vif_info(self._session, [ 1922.294698] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] for vif in network_info: [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return self._sync_wrapper(fn, *args, **kwargs) [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self.wait() [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self[:] = self._gt.wait() [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return self._exit_event.wait() [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] result = hub.switch() [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1922.295028] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return self.greenlet.switch() [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] result = function(*args, **kwargs) [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] return func(*args, **kwargs) [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] raise e [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] nwinfo = self.network_api.allocate_for_instance( [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] created_port_ids = self._update_ports_for_instance( [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] with excutils.save_and_reraise_exception(): [ 1922.295325] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] self.force_reraise() [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] raise self.value [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] updated_port = self._update_port( [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] _ensure_no_port_binding_failure(port) [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] raise exception.PortBindingFailed(port_id=port['id']) [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] nova.exception.PortBindingFailed: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. [ 1922.295635] env[62875]: ERROR nova.compute.manager [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] [ 1922.295953] env[62875]: DEBUG nova.compute.utils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1922.298921] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.090s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.299122] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.299297] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1922.299622] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.484s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.307408] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Build of instance 820d7177-3e8f-4dd7-b7c3-c7abd4a62158 was re-scheduled: Binding failed for port 1be88be5-5769-466a-87e9-5438fd2017fb, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1922.307907] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1922.308175] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.308318] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.308556] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1922.310176] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2005557-e9d0-4709-9a79-3c1dbc8e065a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.319374] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220d708e-1bc7-4f74-94b3-77cf0504870d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.334446] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6632a41-e37f-4938-9bab-0f35bbbb1dd7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.341736] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458f6b20-3ddc-40e0-9ba3-bf2d23b192e9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.375279] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181275MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1922.375482] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.470509] env[62875]: INFO nova.compute.manager [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 
tempest-ServersTestFqdnHostnames-1973683280-project-member] [instance: edf6724b-3a8c-4c19-926e-8f1b081ab50f] Took 1.04 seconds to deallocate network for instance. [ 1922.614213] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1922.837705] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1922.965330] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.135331] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.237226] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488a6b83-3e31-40fb-8422-6e45eef87fac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.244890] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9f3d7f-caf6-4406-999c-7993b13f2869 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.279579] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3726773f-7c85-4b5d-861f-4bfa7c48c30e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.287575] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b120ab-113d-40d9-9c34-329baf551302 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.305134] env[62875]: DEBUG nova.compute.provider_tree [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1923.472518] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-820d7177-3e8f-4dd7-b7c3-c7abd4a62158" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1923.472776] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1923.472963] env[62875]: DEBUG nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1923.473149] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1923.497897] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1923.515361] env[62875]: INFO nova.scheduler.client.report [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Deleted allocations for instance edf6724b-3a8c-4c19-926e-8f1b081ab50f [ 1923.811692] env[62875]: DEBUG nova.scheduler.client.report [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1924.000583] env[62875]: DEBUG nova.network.neutron [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.027190] env[62875]: DEBUG oslo_concurrency.lockutils [None req-37b5a3e9-25a3-4235-8476-858c1541ba9d tempest-ServersTestFqdnHostnames-1973683280 tempest-ServersTestFqdnHostnames-1973683280-project-member] Lock "edf6724b-3a8c-4c19-926e-8f1b081ab50f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 78.893s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.318465] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 
tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.017s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.318465] env[62875]: ERROR nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. [ 1924.318465] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] Traceback (most recent call last): [ 1924.318465] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1924.318465] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self.driver.spawn(context, instance, image_meta, [ 1924.318465] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1924.318465] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1924.318465] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1924.318465] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] vm_ref = self.build_virtual_machine(instance, [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] vif_infos = vmwarevif.get_vif_info(self._session, [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] for vif in network_info: [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] return self._sync_wrapper(fn, *args, **kwargs) [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self.wait() [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self[:] = self._gt.wait() [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 
2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] return self._exit_event.wait() [ 1924.319137] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] current.throw(*self._exc) [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] result = function(*args, **kwargs) [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] return func(*args, **kwargs) [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] raise e [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] nwinfo = self.network_api.allocate_for_instance( [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] created_port_ids = self._update_ports_for_instance( [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1924.319500] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] with excutils.save_and_reraise_exception(): [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] self.force_reraise() [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] raise self.value [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] updated_port = self._update_port( [ 
1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] _ensure_no_port_binding_failure(port) [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] raise exception.PortBindingFailed(port_id=port['id']) [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] nova.exception.PortBindingFailed: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. [ 1924.319827] env[62875]: ERROR nova.compute.manager [instance: 2896c309-a702-498b-8b44-f01620b597e6] [ 1924.320224] env[62875]: DEBUG nova.compute.utils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1924.320224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.219s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.325679] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Build of instance 2896c309-a702-498b-8b44-f01620b597e6 was re-scheduled: Binding failed for port c11dd263-3314-4786-9f42-cda65561833a, please check neutron logs for more information. 
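The traceback above bottoms out in `_ensure_no_port_binding_failure` (nova/network/neutron.py:294): after updating the port in Neutron, Nova inspects the result and converts a failed binding into `PortBindingFailed`, which is what aborts the build and triggers the re-schedule logged here. A minimal self-contained sketch of that check, assuming the standard Neutron `binding:vif_type` port attribute and using a stand-in for `nova.exception.PortBindingFailed`:

```python
# Sketch of the check behind the PortBindingFailed above. Assumption:
# Neutron marks a port it could not bind with binding:vif_type =
# 'binding_failed'; the exception class here stands in for
# nova.exception.PortBindingFailed.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


try:
    ensure_no_port_binding_failure({
        'id': 'c11dd263-3314-4786-9f42-cda65561833a',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED,
    })
except PortBindingFailed as exc:
    print(exc)  # same message as the log line above
```

Binding usually fails on the Neutron side (no agent or mechanism driver able to bind the port on the target host), which is why the message points at the Neutron logs rather than Nova's.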
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1924.326523] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1924.326523] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Acquiring lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.326523] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Acquired lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.326623] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1924.503729] env[62875]: INFO nova.compute.manager [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 820d7177-3e8f-4dd7-b7c3-c7abd4a62158] Took 1.03 seconds to deallocate network for instance. [ 1924.532601] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1924.864954] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1924.867088] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "a64253fe-4ba9-4686-810b-a26a4c29631b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.867323] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.971887] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.063916] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.204161] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569e580a-3ff2-42cc-badd-47e7f0b55e53 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.213497] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a42b8e-a7b3-4a1e-95c1-bedfe4118ada {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.247469] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258c912e-c8fb-4792-bdfa-a2811966d8a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.255586] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117f6f0c-34d5-42b8-8032-d50fa69e17b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.269984] env[62875]: DEBUG nova.compute.provider_tree [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1925.478229] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832
tempest-ServersTestBootFromVolume-1104058832-project-member] Releasing lock "refresh_cache-2896c309-a702-498b-8b44-f01620b597e6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.478533] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1925.478906] env[62875]: DEBUG nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1925.478906] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1925.505317] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1925.549040] env[62875]: INFO nova.scheduler.client.report [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted allocations for instance 820d7177-3e8f-4dd7-b7c3-c7abd4a62158 [ 1925.776774] env[62875]: DEBUG nova.scheduler.client.report [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1926.008927] env[62875]: DEBUG nova.network.neutron [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.066015] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f4e58f15-7172-4e97-8501-a9e9cc7b15ac tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "820d7177-3e8f-4dd7-b7c3-c7abd4a62158" "released" by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.573s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.283970] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.964s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.284714] env[62875]: ERROR nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Traceback (most recent call last): [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self.driver.spawn(context, instance, image_meta, [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] vm_ref = self.build_virtual_machine(instance, [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] vif_infos = vmwarevif.get_vif_info(self._session, [ 1926.284714] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] for vif in network_info: [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return self._sync_wrapper(fn, *args, **kwargs) [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self.wait() [ 1926.285050] env[62875]: ERROR 
nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self[:] = self._gt.wait() [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return self._exit_event.wait() [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] result = hub.switch() [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1926.285050] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return self.greenlet.switch() [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] result = function(*args, **kwargs) [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] return func(*args, **kwargs) [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] raise e [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] nwinfo = self.network_api.allocate_for_instance( [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] created_port_ids = self._update_ports_for_instance( [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] with excutils.save_and_reraise_exception(): [ 1926.285351] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] self.force_reraise() [ 
1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] raise self.value [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] updated_port = self._update_port( [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] _ensure_no_port_binding_failure(port) [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] raise exception.PortBindingFailed(port_id=port['id']) [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] nova.exception.PortBindingFailed: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. [ 1926.285724] env[62875]: ERROR nova.compute.manager [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] [ 1926.285994] env[62875]: DEBUG nova.compute.utils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1926.290599] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.402s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.290599] env[62875]: INFO nova.compute.claims [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1926.292161] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Build of instance 74a1c3db-26b1-426e-be47-621c79ef9029 was re-scheduled: Binding failed for port b8db97ed-983b-4899-9020-c7606d05012c, please check neutron logs for more information. 
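The `Lock "compute_resources" acquired ... waited 13.402s` and `"released" ... held 1.964s` pairs around these builds are emitted by oslo.concurrency's lock wrapper: the resource tracker serializes claims and aborts on one named lock, so a storm of concurrent builds queues up on it, which is why waited times run into double-digit seconds here. A sketch of the pattern using only the public `oslo_concurrency.lockutils` API (the function body is illustrative):

```python
# Named-lock pattern behind the acquired/released lines. The lock name
# mirrors the resource tracker's "compute_resources" lock.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim():
    # Runs with the process-wide "compute_resources" lock held, so
    # concurrent claims and aborts serialize; long "waited" times in
    # the log mean many builds were queued on this lock.
    ...


# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass
```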
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1926.292600] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1926.292854] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Acquiring lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.293375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Acquired lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.293375] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1926.512469] env[62875]: INFO nova.compute.manager [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] [instance: 2896c309-a702-498b-8b44-f01620b597e6] Took 1.03 seconds to deallocate network for instance. [ 1926.573703] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1926.829677] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1926.889105] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.107947] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.391259] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Releasing lock "refresh_cache-74a1c3db-26b1-426e-be47-621c79ef9029" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.391485] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1927.391696] env[62875]: DEBUG nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1927.391808] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1927.416273] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1927.544411] env[62875]: INFO nova.scheduler.client.report [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Deleted allocations for instance 2896c309-a702-498b-8b44-f01620b597e6 [ 1927.696855] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0614c113-ab35-4a40-b2a1-e67ab3e3de41 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.704918] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609b0c1e-dc10-467b-af96-f8590e7da9f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.741657] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57f2b82-f9a0-499c-bd3b-871ee66d257f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.750230] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82334c48-7a8b-4d76-85a7-d0591064cb61 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.764882] env[62875]: DEBUG nova.compute.provider_tree [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.920570] env[62875]: DEBUG nova.network.neutron [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.045496] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "305aebbe-f983-4826-b8c0-9854458f7d48" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.045729] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.056175] env[62875]: DEBUG oslo_concurrency.lockutils [None req-efa6222b-4709-44c2-9955-d232c6d5c304 tempest-ServersTestBootFromVolume-1104058832 tempest-ServersTestBootFromVolume-1104058832-project-member] Lock "2896c309-a702-498b-8b44-f01620b597e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" ::
held 78.561s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.267977] env[62875]: DEBUG nova.scheduler.client.report [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1928.426408] env[62875]: INFO nova.compute.manager [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] [instance: 74a1c3db-26b1-426e-be47-621c79ef9029] Took 1.03 seconds to deallocate network for instance. [ 1928.559081] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1928.772952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.773523] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Start building networks asynchronously for instance. 
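The inventory dict in the report above is what Placement schedules against: usable capacity per resource class is `(total - reserved) * allocation_ratio`, and `max_unit` caps any single allocation. Checking the numbers from this report:

```python
# Effective capacity for the provider inventory in the report above,
# using the Placement formula (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0,
             'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512,
                  'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0,
                'allocation_ratio': 1.0, 'max_unit': 175},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, "
          f"largest single allocation={inv['max_unit']}")
# VCPU: schedulable=192, largest single allocation=16
# MEMORY_MB: schedulable=196078, largest single allocation=65530
# DISK_GB: schedulable=400, largest single allocation=175
```

So the 48 host VCPUs are overcommitted 4:1 into 192 schedulable units, while memory and disk run at 1:1.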
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1928.775977] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.980s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.777780] env[62875]: INFO nova.compute.claims [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1929.084777] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.282389] env[62875]: DEBUG nova.compute.utils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1929.285648] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1929.285817] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1929.345030] env[62875]: DEBUG nova.policy [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d0e175791341aea0db00ef8a1b5680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '226340868e7446cca12688a32d13c630', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1929.456844] env[62875]: INFO nova.scheduler.client.report [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Deleted allocations for instance 74a1c3db-26b1-426e-be47-621c79ef9029 [ 1929.786402] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Start building block device mappings for instance. 
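"Allocating IP information in the background" is where `_allocate_network_async` is handed off to a green thread; the driver only touches the result later, when it iterates `network_info`, and the wrapper's `wait()` re-raises whatever the allocation raised. That is why the `PortBindingFailed` tracebacks in this run surface inside `vmwarevif.get_vif_info` rather than at allocation time. An illustrative reconstruction of the pattern (the names below are made up for the sketch; Nova wraps the green thread in `network_model.NetworkInfoAsyncWrapper`):

```python
# Async allocation: the failure is stored in the green thread and only
# re-raised when the spawner finally waits on the result.
import eventlet


def allocate_network():
    # Stand-in for network_api.allocate_for_instance() failing.
    raise RuntimeError('Binding failed for port ...')


gt = eventlet.spawn(allocate_network)  # returns immediately

# ... meanwhile block device mappings and the VM config are built ...

try:
    nwinfo = gt.wait()  # the stored exception re-raises here
except RuntimeError as exc:
    print(f'Instance failed to spawn: {exc}')
```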
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1929.967522] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9be4123f-75da-4052-bb86-ee611ce698b2 tempest-ServerDiagnosticsNegativeTest-477409887 tempest-ServerDiagnosticsNegativeTest-477409887-project-member] Lock "74a1c3db-26b1-426e-be47-621c79ef9029" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 78.615s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.994904] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Successfully created port: 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1930.189077] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7a0d80-e28a-44dc-9831-f07fdf37037f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.197186] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0f5315-2f60-4c90-82b3-9e8cedf921a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.233723] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01657db3-e21e-4395-bdec-3613368636e4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.241460] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa52903e-134f-4683-9109-24c4be63c2c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.256717] env[62875]: DEBUG nova.compute.provider_tree [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1930.473020] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Starting instance...
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1930.760115] env[62875]: DEBUG nova.scheduler.client.report [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1930.799874] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1930.827217] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1930.827470] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1930.827627] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1930.827806] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1930.827949] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1930.828114] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e
tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1930.831351] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1930.834527] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1930.834527] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1930.834527] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1930.834527] env[62875]: DEBUG nova.virt.hardware [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1930.834527] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be592f57-d71c-42fb-84f5-9853acc95062 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.843943] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dbdc4c-5273-480e-8d4c-2d5b356783a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.015072] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.271166] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.271166] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 
tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1931.273325] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.491s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.277205] env[62875]: INFO nova.compute.claims [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1931.785853] env[62875]: DEBUG nova.compute.utils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1931.791281] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1931.791676] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1931.923957] env[62875]: DEBUG nova.policy [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '745f4fc89adc4edf8866270f43541b57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '645131c9b9d14a4ea8a70b26943ab45d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1932.294797] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Start building block device mappings for instance. 
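The `Policy check for network:attach_external_network failed` lines record an oslo.policy authorization: the tempest credentials carry only the `member` and `reader` roles, and attaching to an external network is gated by a stricter rule, so the check fails and the request simply is not allowed to use networks marked external. A simplified sketch of such a check; the rule name and roles are taken from the log, while the enforcer setup and the admin-only default are assumptions for illustration:

```python
# Simplified oslo.policy check mirroring the failed
# network:attach_external_network authorization above.
from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
conf([])  # parse empty CLI args so option defaults are usable
enforcer = policy.Enforcer(conf)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '226340868e7446cca12688a32d13c630'}
allowed = enforcer.enforce('network:attach_external_network', {}, creds)
print(allowed)  # False: no admin role, as in the DEBUG line above
```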
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1932.465393] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Successfully created port: 0d3882f3-fd7b-43e1-a099-329e33bc3f23 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1932.683463] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7bd869-06a1-4661-9b31-c378153a5b0f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.692498] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a49443d-9840-4efb-b571-04ffad9945f7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.726598] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6f3055-5d74-49ce-8227-57a3bda79b1f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.734299] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552b07bd-7f01-4014-ba4e-71aef700458c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.750041] env[62875]: DEBUG nova.compute.provider_tree [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.072923] env[62875]: DEBUG nova.compute.manager [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Received event network-changed-3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1933.072923] env[62875]: DEBUG nova.compute.manager [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Refreshing instance network info cache due to event network-changed-3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1933.072923] env[62875]: DEBUG oslo_concurrency.lockutils [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] Acquiring lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.072923] env[62875]: DEBUG oslo_concurrency.lockutils [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] Acquired lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.073939] env[62875]: DEBUG nova.network.neutron [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Refreshing network info cache for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1933.172846] env[62875]: ERROR nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. [ 1933.172846] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1933.172846] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1933.172846] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1933.172846] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1933.172846] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1933.172846] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1933.172846] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1933.172846] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1933.172846] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1933.172846] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1933.172846] env[62875]: ERROR nova.compute.manager raise self.value [ 1933.172846] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1933.172846] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1933.172846] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1933.172846] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1933.173796] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1933.173796] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1933.173796] env[62875]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. [ 1933.173796] env[62875]: ERROR nova.compute.manager [ 1933.173796] env[62875]: Traceback (most recent call last): [ 1933.173796] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1933.173796] env[62875]: listener.cb(fileno) [ 1933.173796] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1933.173796] env[62875]: result = function(*args, **kwargs) [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1933.173796] env[62875]: return func(*args, **kwargs) [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1933.173796] env[62875]: raise e [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1933.173796] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1933.173796] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1933.173796] env[62875]: with excutils.save_and_reraise_exception(): [ 1933.173796] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1933.173796] env[62875]: self.force_reraise() [ 1933.173796] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1933.173796] env[62875]: raise self.value [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1933.173796] env[62875]: updated_port = self._update_port( [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1933.173796] env[62875]: _ensure_no_port_binding_failure(port) [ 1933.173796] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1933.173796] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1933.174505] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. [ 1933.174505] env[62875]: Removing descriptor: 21 [ 1933.174505] env[62875]: ERROR nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. 
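Every traceback in this run funnels through `excutils.save_and_reraise_exception()` (the `force_reraise` / `raise self.value` frames above): the context manager captures the in-flight exception, lets cleanup code run inside the `except` block, and then re-raises the original. The idiom, with an illustrative cleanup step standing in for the port rollback in `_update_ports_for_instance`:

```python
# The oslo.utils save-and-reraise idiom from the frames above: run
# cleanup on failure, then re-raise the original exception. The
# cleanup body is illustrative.
from oslo_utils import excutils


def update_ports():
    try:
        raise ValueError('binding failed')  # stand-in for _update_port()
    except Exception:
        with excutils.save_and_reraise_exception():
            print('rolling back created ports')  # cleanup runs first
        # not reached: the context manager re-raises on exit


try:
    update_ports()
except ValueError as exc:
    print(f're-raised: {exc}')
```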
[ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Traceback (most recent call last): [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] yield resources [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self.driver.spawn(context, instance, image_meta, [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1933.174505] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] vm_ref = self.build_virtual_machine(instance, [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] vif_infos = vmwarevif.get_vif_info(self._session, [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] for vif in network_info: [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return self._sync_wrapper(fn, *args, **kwargs) [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self.wait() [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self[:] = self._gt.wait() [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return self._exit_event.wait() [ 1933.174800] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] result = hub.switch() [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return self.greenlet.switch() [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] result = function(*args, **kwargs) [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return func(*args, **kwargs) [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] raise e [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] nwinfo = self.network_api.allocate_for_instance( [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1933.175324] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] created_port_ids = self._update_ports_for_instance( [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] with excutils.save_and_reraise_exception(): [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self.force_reraise() [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] raise self.value [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] updated_port = self._update_port( [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] _ensure_no_port_binding_failure(port) [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1933.176138] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] raise exception.PortBindingFailed(port_id=port['id']) [ 1933.176581] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] nova.exception.PortBindingFailed: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. [ 1933.176581] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] [ 1933.176581] env[62875]: INFO nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Terminating instance [ 1933.252988] env[62875]: DEBUG nova.scheduler.client.report [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1933.311586] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1933.349090] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1933.350098] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1933.350098] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1933.350098] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1933.350098] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1933.350381] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1933.350619] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1933.351246] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
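The nova.virt.hardware DEBUG lines above walk through CPU topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits (0:0:0), maxima of 65536 sockets/cores/threads, yielding exactly one possible topology, 1:1:1. An illustrative sketch of that enumeration (assumed shape, not Nova's exact code) shows why only one topology survives:

```python
# Illustrative enumeration matching the hardware.py DEBUG lines: list every
# (sockets, cores, threads) triple whose product equals the vCPU count and
# that stays within the per-dimension maxima. For vcpus=1 the only
# candidate is 1:1:1, hence "Got 1 possible topologies".
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)


print(list(possible_topologies(1, 65536, 65536, 65536)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```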
[ 1933.351246] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1933.351246] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1933.353123] env[62875]: DEBUG nova.virt.hardware [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1933.354514] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6ecbd4-8aa0-4bd9-866d-18906194744c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.365117] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279ad41f-fac1-4ff0-9aab-54e6ffb31ca9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.610229] env[62875]: DEBUG nova.network.neutron [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1933.680856] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.762047] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.762612] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1933.766072] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.069s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.771246] env[62875]: INFO nova.compute.claims [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1933.835550] env[62875]: DEBUG nova.network.neutron [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.906557] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "c1e107cd-5c03-405f-bdae-3281dc4844d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.907048] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "c1e107cd-5c03-405f-bdae-3281dc4844d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.123317] env[62875]: DEBUG nova.compute.manager [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Received event network-changed-0d3882f3-fd7b-43e1-a099-329e33bc3f23 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1934.123567] env[62875]: DEBUG nova.compute.manager [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Refreshing instance network info cache due to event network-changed-0d3882f3-fd7b-43e1-a099-329e33bc3f23. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1934.123743] env[62875]: DEBUG oslo_concurrency.lockutils [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] Acquiring lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.123856] env[62875]: DEBUG oslo_concurrency.lockutils [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] Acquired lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.124355] env[62875]: DEBUG nova.network.neutron [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Refreshing network info cache for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1934.281298] env[62875]: DEBUG nova.compute.utils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1934.286223] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1934.290303] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1934.340573] env[62875]: DEBUG oslo_concurrency.lockutils [req-0b5fd615-3b45-4d20-8e5e-40b524bf4978 req-1dbb86ee-8274-428f-bb06-e538aa23a7a2 service nova] Releasing lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.340573] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.340786] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1934.363582] env[62875]: ERROR nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check 
neutron logs for more information. [ 1934.363582] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1934.363582] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1934.363582] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1934.363582] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1934.363582] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1934.363582] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1934.363582] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1934.363582] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.363582] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1934.363582] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.363582] env[62875]: ERROR nova.compute.manager raise self.value [ 1934.363582] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1934.363582] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1934.363582] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1934.363582] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1934.364085] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1934.364085] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1934.364085] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. 
[ 1934.364085] env[62875]: ERROR nova.compute.manager [ 1934.364085] env[62875]: Traceback (most recent call last): [ 1934.364085] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1934.364085] env[62875]: listener.cb(fileno) [ 1934.364085] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1934.364085] env[62875]: result = function(*args, **kwargs) [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1934.364085] env[62875]: return func(*args, **kwargs) [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1934.364085] env[62875]: raise e [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1934.364085] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1934.364085] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1934.364085] env[62875]: with excutils.save_and_reraise_exception(): [ 1934.364085] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.364085] env[62875]: self.force_reraise() [ 1934.364085] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.364085] env[62875]: raise self.value [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1934.364085] env[62875]: updated_port = self._update_port( [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1934.364085] env[62875]: _ensure_no_port_binding_failure(port) [ 1934.364085] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1934.364085] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1934.364987] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. [ 1934.364987] env[62875]: Removing descriptor: 18 [ 1934.364987] env[62875]: ERROR nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. 
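The excutils.py frames that appear in each traceback (__exit__ -> force_reraise -> raise self.value) come from oslo.utils' save_and_reraise_exception context manager, which lets cleanup run after a failure and then re-raises the original exception unchanged. A self-contained sketch of the pattern, with do_update and cleanup as hypothetical stand-ins for Nova's _update_port and its rollback:

```python
# The save_and_reraise_exception pattern behind the excutils frames above:
# on failure, roll back the ports created so far, then re-raise the
# original exception (force_reraise / raise self.value in the traceback).
from oslo_utils import excutils


def do_update(port):        # hypothetical stand-in for Nova's _update_port()
    if port == 'p2':
        raise RuntimeError('binding failed for %s' % port)
    return port


def cleanup(port_id):       # hypothetical rollback hook
    print('rolling back', port_id)


def update_ports(ports):
    created = []
    try:
        for port in ports:
            created.append(do_update(port))
        return created
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs inside the except block; the saved exception is
            # re-raised automatically when the context manager exits.
            for port_id in created:
                cleanup(port_id)


try:
    update_ports(['p1', 'p2'])
except RuntimeError as e:
    print('re-raised:', e)  # 'p1' was rolled back first
```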
[ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Traceback (most recent call last): [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] yield resources [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self.driver.spawn(context, instance, image_meta, [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1934.364987] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] vm_ref = self.build_virtual_machine(instance, [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] vif_infos = vmwarevif.get_vif_info(self._session, [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] for vif in network_info: [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return self._sync_wrapper(fn, *args, **kwargs) [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self.wait() [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self[:] = self._gt.wait() [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return self._exit_event.wait() [ 1934.365371] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] result = hub.switch() [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return self.greenlet.switch() [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] result = function(*args, **kwargs) [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return func(*args, **kwargs) [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] raise e [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] nwinfo = self.network_api.allocate_for_instance( [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1934.365672] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] created_port_ids = self._update_ports_for_instance( [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] with excutils.save_and_reraise_exception(): [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self.force_reraise() [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] raise self.value [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] updated_port = self._update_port( [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] _ensure_no_port_binding_failure(port) [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1934.365975] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] raise exception.PortBindingFailed(port_id=port['id']) [ 1934.366318] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] nova.exception.PortBindingFailed: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. [ 1934.366318] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] [ 1934.366318] env[62875]: INFO nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Terminating instance [ 1934.405258] env[62875]: DEBUG nova.policy [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f31044c31f04b07a5e4657a6ef015f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67b77a82540742a2a56216fd1d3a990e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1934.663160] env[62875]: DEBUG nova.network.neutron [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1934.731980] env[62875]: DEBUG nova.network.neutron [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.785980] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1934.871430] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.898405] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Successfully created port: 1b726078-d307-4a6d-b485-0a72664ca270 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1934.919611] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1935.160359] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.235063] env[62875]: DEBUG oslo_concurrency.lockutils [req-b2a5222f-d513-404b-a332-361bdb31e148 req-9121634a-879f-46d4-ab1e-f6e364fcc715 service nova] Releasing lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.235469] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquired lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.235656] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1935.253892] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cde9790-dd24-4ef2-89f6-e988ee057969 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.264767] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38a0d89-3f9b-4981-bbdd-58b2847c9ffe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.310932] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1648821b-481a-4841-ae2b-42b43ab62902 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.320930] env[62875]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4dedf5-17db-4f8a-9028-a91c51f222de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.345396] env[62875]: DEBUG nova.compute.provider_tree [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1935.553824] env[62875]: DEBUG nova.compute.manager [req-5e3dee69-9782-4298-887c-ad24322d73ff req-bfc9072f-18d2-4623-a273-29082823baa8 service nova] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Received event network-vif-deleted-3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1935.669109] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.669109] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1935.669109] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1935.669109] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46b1263d-3579-4655-a411-f0c07ab53da2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.681244] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d735f71-e050-4e1b-b907-9abd7d6b00a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.704728] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d241bf8-6f7a-467f-9640-a9819d5cca72 could not be found. 
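The Acquiring / Acquired / Releasing "refresh_cache-&lt;uuid&gt;" lines threaded through this section come from oslo.concurrency's named locks, which serialize network-info cache refreshes per instance so the event handler and the compute manager do not rebuild the same cache concurrently. A sketch of the pattern, assuming a hypothetical rebuild_cache helper (the lock name format is taken directly from the log):

```python
# Sketch of the oslo.concurrency usage behind the refresh_cache-<uuid>
# lock lines: a named in-process lock, one per instance, held while the
# network info cache is rebuilt. lockutils emits the Acquiring/Acquired/
# Releasing DEBUG lines on entry and exit.
from oslo_concurrency import lockutils


def rebuild_cache(instance_uuid):   # hypothetical helper for illustration
    print('refreshing network info for', instance_uuid)


def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        rebuild_cache(instance_uuid)


refresh_network_cache('7d241bf8-6f7a-467f-9640-a9819d5cca72')
```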
[ 1935.704954] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1935.705168] env[62875]: INFO nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1935.705408] env[62875]: DEBUG oslo.service.loopingcall [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1935.705635] env[62875]: DEBUG nova.compute.manager [-] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1935.705723] env[62875]: DEBUG nova.network.neutron [-] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1935.728306] env[62875]: DEBUG nova.network.neutron [-] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1935.779495] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1935.811958] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1935.840857] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1935.840857] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1935.841142] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1935.841142] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1935.841249] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1935.842542] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1935.842542] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1935.842542] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1935.842542] env[62875]: DEBUG nova.virt.hardware [None 
req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1935.842542] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1935.842869] env[62875]: DEBUG nova.virt.hardware [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1935.844540] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4476485f-91d0-4cb3-9573-fe40a39f9df5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.851743] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf0d081-df34-41a2-b33e-b42bc926ce1b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.856784] env[62875]: DEBUG nova.scheduler.client.report [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1935.912897] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.981441] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "37ae8e69-f953-4846-8a21-fed697ea575a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.981441] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "37ae8e69-f953-4846-8a21-fed697ea575a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.112156] env[62875]: ERROR nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. [ 1936.112156] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1936.112156] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1936.112156] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1936.112156] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1936.112156] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1936.112156] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1936.112156] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1936.112156] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1936.112156] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1936.112156] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1936.112156] env[62875]: ERROR nova.compute.manager raise self.value [ 1936.112156] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1936.112156] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1936.112156] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1936.112156] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1936.112573] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1936.112573] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1936.112573] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. 
[ 1936.112573] env[62875]: ERROR nova.compute.manager [ 1936.112573] env[62875]: Traceback (most recent call last): [ 1936.112573] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1936.112573] env[62875]: listener.cb(fileno) [ 1936.112573] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1936.112573] env[62875]: result = function(*args, **kwargs) [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1936.112573] env[62875]: return func(*args, **kwargs) [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1936.112573] env[62875]: raise e [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1936.112573] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1936.112573] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1936.112573] env[62875]: with excutils.save_and_reraise_exception(): [ 1936.112573] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1936.112573] env[62875]: self.force_reraise() [ 1936.112573] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1936.112573] env[62875]: raise self.value [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1936.112573] env[62875]: updated_port = self._update_port( [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1936.112573] env[62875]: _ensure_no_port_binding_failure(port) [ 1936.112573] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1936.112573] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1936.113508] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. [ 1936.113508] env[62875]: Removing descriptor: 21 [ 1936.113645] env[62875]: ERROR nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. 
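"Instance failed network setup after 1 attempt(s)" together with the "raise e" frame at manager.py line 2018 implies a bounded retry loop around network allocation; Nova exposes the bound as the network_allocate_retries option, whose default of 0 yields the single attempt seen here. A sketch of the assumed loop shape (not Nova's exact code):

```python
# Assumed shape of the retry loop implied by the log: with retries=0 there
# is one attempt, the failure is logged with the attempt count, and the
# final exception is re-raised to abort the boot.
import logging

LOG = logging.getLogger(__name__)


def allocate_network(allocate, retries=0):
    attempts = max(retries, 0) + 1
    for attempt in range(1, attempts + 1):
        try:
            return allocate()
        except Exception as e:
            LOG.error('Instance failed network setup after %d attempt(s): %s',
                      attempt, e)
            if attempt == attempts:
                raise e  # the "raise e" frame seen in the tracebacks
```

With the default of a single attempt, every PortBindingFailed in this run is terminal: the instance goes straight to "Terminating instance" and network deallocation rather than being retried.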
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Traceback (most recent call last):
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] yield resources
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self.driver.spawn(context, instance, image_meta,
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] vm_ref = self.build_virtual_machine(instance,
[ 1936.113645] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] vif_infos = vmwarevif.get_vif_info(self._session,
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] for vif in network_info:
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return self._sync_wrapper(fn, *args, **kwargs)
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self.wait()
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self[:] = self._gt.wait()
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return self._exit_event.wait()
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1936.113902] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] result = hub.switch()
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return self.greenlet.switch()
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] result = function(*args, **kwargs)
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return func(*args, **kwargs)
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] raise e
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] nwinfo = self.network_api.allocate_for_instance(
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] created_port_ids = self._update_ports_for_instance(
[ 1936.114224] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] with excutils.save_and_reraise_exception():
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self.force_reraise()
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] raise self.value
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] updated_port = self._update_port(
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] _ensure_no_port_binding_failure(port)
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] raise exception.PortBindingFailed(port_id=port['id'])
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] nova.exception.PortBindingFailed: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information.
[ 1936.114537] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73]
[ 1936.114851] env[62875]: INFO nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Terminating instance
[ 1936.231327] env[62875]: DEBUG nova.network.neutron [-] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1936.362475] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1936.363517] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 1936.366155] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.324s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1936.417672] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Releasing lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1936.418049] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1936.418124] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1936.418435] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd9dd517-139e-4a50-a3fc-d825f45646fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1936.434441] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3648ca-2eaf-4c4c-bcf2-5ca156b85355 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1936.462787] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 793da91d-461a-465b-b9a3-c5fa0f5b877d could not be found.
[ 1936.463097] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1936.463256] env[62875]: INFO nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1936.463665] env[62875]: DEBUG oslo.service.loopingcall [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1936.464973] env[62875]: DEBUG nova.compute.manager [-] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1936.466108] env[62875]: DEBUG nova.network.neutron [-] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1936.469636] env[62875]: DEBUG nova.compute.manager [req-68331137-b605-4df6-a01f-be9d47b68398 req-62d9886e-405c-4843-8776-6e00779e3c96 service nova] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Received event network-vif-deleted-0d3882f3-fd7b-43e1-a099-329e33bc3f23 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1936.493664] env[62875]: DEBUG nova.network.neutron [-] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1936.618326] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1936.618580] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquired lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1936.618766] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1936.735540] env[62875]: INFO nova.compute.manager [-] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Took 1.03 seconds to deallocate network for instance.
[ 1936.739045] env[62875]: DEBUG nova.compute.claims [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1936.739612] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1936.871147] env[62875]: DEBUG nova.compute.utils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1936.877078] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1936.877078] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1936.935105] env[62875]: DEBUG nova.policy [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a3ba37cf0af40f7935ac6c82d1bdb15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bfe4401208844fbb1654de2c3d4908f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 1936.996059] env[62875]: DEBUG nova.network.neutron [-] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1937.135066] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "d0c4095f-2d78-4055-b568-7e70e7c4c182" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1937.135393] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1937.145666] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1937.218096] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a76dd21-fd0f-4b96-ab50-77b3948c54b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1937.226270] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15392f1-3163-4c42-a502-f70ee017f41d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1937.257717] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3be3930-c769-4e77-8987-07ee4901240d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1937.265237] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7b3916-ad94-4e96-8dd6-0d17ad2c2b73 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1937.280285] env[62875]: DEBUG nova.compute.provider_tree [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1937.312669] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Successfully created port: 24684040-75d9-482f-b8e8-74b5e3c5e1eb {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1937.376332] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 1937.474964] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1937.498519] env[62875]: INFO nova.compute.manager [-] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Took 1.03 seconds to deallocate network for instance.
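Several records in this window show the cache-refresh cycle for instances whose ports never bound: take the per-instance refresh_cache-<uuid> lock, rebuild the network info, and store an empty list. A toy sketch of that flow under those assumptions (the dict cache and the fetch_ports hook are hypothetical stand-ins, not Nova's API):

    from oslo_concurrency import lockutils

    _nw_info_cache = {}  # instance uuid -> network_info list

    def refresh_instance_cache(instance_uuid, fetch_ports):
        # Per-instance lock, matching the "refresh_cache-<uuid>" names above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            ports = fetch_ports(instance_uuid)
            # A failed binding leaves the instance with no usable VIFs, so
            # the cache ends up holding an empty network_info: [].
            _nw_info_cache[instance_uuid] = ports or []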
[ 1937.503857] env[62875]: DEBUG nova.compute.claims [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1937.504343] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1937.589320] env[62875]: DEBUG nova.compute.manager [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Received event network-changed-1b726078-d307-4a6d-b485-0a72664ca270 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1937.589692] env[62875]: DEBUG nova.compute.manager [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Refreshing instance network info cache due to event network-changed-1b726078-d307-4a6d-b485-0a72664ca270. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 1937.590036] env[62875]: DEBUG oslo_concurrency.lockutils [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] Acquiring lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1937.784034] env[62875]: DEBUG nova.scheduler.client.report [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1937.981019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Releasing lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1937.981019] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1937.981019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1937.981019] env[62875]: DEBUG oslo_concurrency.lockutils [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] Acquired lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1937.981019] env[62875]: DEBUG nova.network.neutron [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Refreshing network info cache for port 1b726078-d307-4a6d-b485-0a72664ca270 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1937.981289] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3eaefccf-ee22-4199-a72e-0c11ca70a469 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1937.994016] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1149d8f-76f4-4c3c-a0ba-d3b15637e21f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1938.021859] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ee68d78-b265-4ee8-afcc-ce6ed150fb73 could not be found.
[ 1938.021859] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1938.021859] env[62875]: INFO nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1938.022133] env[62875]: DEBUG oslo.service.loopingcall [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1938.022473] env[62875]: DEBUG nova.compute.manager [-] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1938.022692] env[62875]: DEBUG nova.network.neutron [-] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1938.049064] env[62875]: DEBUG nova.network.neutron [-] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1938.190826] env[62875]: ERROR nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information.
[ 1938.190826] env[62875]: ERROR nova.compute.manager Traceback (most recent call last):
[ 1938.190826] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1938.190826] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 1938.190826] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1938.190826] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 1938.190826] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1938.190826] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 1938.190826] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1938.190826] env[62875]: ERROR nova.compute.manager self.force_reraise()
[ 1938.190826] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1938.190826] env[62875]: ERROR nova.compute.manager raise self.value
[ 1938.190826] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1938.190826] env[62875]: ERROR nova.compute.manager updated_port = self._update_port(
[ 1938.190826] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1938.190826] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 1938.191568] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1938.191568] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 1938.191568] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information.
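Every traceback in this log passes through oslo_utils.excutils.save_and_reraise_exception(), the context manager Nova uses to run cleanup inside an except block without losing the original error. A simplified stdlib-only model of the idea (the real class can also suppress the re-raise, and it logs the saved exception):

    import sys

    class save_and_reraise_exception:
        # Capture the in-flight exception on entry; re-raise it on exit so
        # cleanup in the body cannot swallow it (simplified model of
        # oslo_utils.excutils.save_and_reraise_exception).
        def __enter__(self):
            self.type_, self.value, self.tb = sys.exc_info()
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None and self.value is not None:
                raise self.value.with_traceback(self.tb)
            return False

    def update_port_with_cleanup(update, rollback):
        try:
            return update()
        except Exception:
            with save_and_reraise_exception():
                rollback()  # runs first; the original error still propagates

This is why the force_reraise()/raise self.value frames appear inside every PortBindingFailed traceback: the exception is deliberately re-raised after the cleanup body finishes.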
[ 1938.191568] env[62875]: ERROR nova.compute.manager
[ 1938.191568] env[62875]: Traceback (most recent call last):
[ 1938.191568] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 1938.191568] env[62875]: listener.cb(fileno)
[ 1938.191568] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1938.191568] env[62875]: result = function(*args, **kwargs)
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1938.191568] env[62875]: return func(*args, **kwargs)
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1938.191568] env[62875]: raise e
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1938.191568] env[62875]: nwinfo = self.network_api.allocate_for_instance(
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1938.191568] env[62875]: created_port_ids = self._update_ports_for_instance(
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1938.191568] env[62875]: with excutils.save_and_reraise_exception():
[ 1938.191568] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1938.191568] env[62875]: self.force_reraise()
[ 1938.191568] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1938.191568] env[62875]: raise self.value
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1938.191568] env[62875]: updated_port = self._update_port(
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1938.191568] env[62875]: _ensure_no_port_binding_failure(port)
[ 1938.191568] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1938.191568] env[62875]: raise exception.PortBindingFailed(port_id=port['id'])
[ 1938.192302] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information.
[ 1938.192302] env[62875]: Removing descriptor: 21
[ 1938.290074] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.924s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1938.290714] env[62875]: ERROR nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information.
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Traceback (most recent call last):
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self.driver.spawn(context, instance, image_meta,
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] vm_ref = self.build_virtual_machine(instance,
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] vif_infos = vmwarevif.get_vif_info(self._session,
[ 1938.290714] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] for vif in network_info:
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return self._sync_wrapper(fn, *args, **kwargs)
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self.wait()
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self[:] = self._gt.wait()
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return self._exit_event.wait()
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] result = hub.switch()
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1938.291153] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return self.greenlet.switch()
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] result = function(*args, **kwargs)
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] return func(*args, **kwargs)
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] raise e
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] nwinfo = self.network_api.allocate_for_instance(
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] created_port_ids = self._update_ports_for_instance(
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] with excutils.save_and_reraise_exception():
[ 1938.291557] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] self.force_reraise()
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] raise self.value
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] updated_port = self._update_port(
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] _ensure_no_port_binding_failure(port)
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] raise exception.PortBindingFailed(port_id=port['id'])
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e] nova.exception.PortBindingFailed: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information.
[ 1938.291923] env[62875]: ERROR nova.compute.manager [instance: ec4405a5-de44-4749-9225-3945db05ca6e]
[ 1938.292304] env[62875]: DEBUG nova.compute.utils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1938.292618] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.917s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1938.299012] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Build of instance ec4405a5-de44-4749-9225-3945db05ca6e was re-scheduled: Binding failed for port 4d33003e-9d69-4051-87a2-9bc19620ca97, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}}
[ 1938.302572] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}}
[ 1938.302572] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Acquiring lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1938.302572] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Acquired lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1938.302572] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1938.391441] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 1938.423905] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 1938.425117] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1938.425117] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 1938.425117] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1938.425117] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 1938.425117] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 1938.425291] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 1938.425291] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 1938.425432] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 1938.425627] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 1938.426225] env[62875]: DEBUG nova.virt.hardware [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 1938.426869] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10e0f1c-adc6-4c6a-a00f-845c55123adc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1938.438282] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9cb3a7-2429-4f65-b58c-db6a0fa0860a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1938.455018] env[62875]: ERROR nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information.
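The nova.virt.hardware records above negotiate a guest CPU topology: with no flavor or image constraints (preferences 0:0:0, caps of 65536), a 1-vCPU guest admits exactly one candidate, 1 socket x 1 core x 1 thread. A small sketch of that enumeration (the exhaustive divisor walk is illustrative; Nova's real version also applies preferences, ordering, and NUMA constraints):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples that exactly cover
        # the vCPU count within the caps; for 1 vCPU with no constraints
        # this yields the single topology the log reports.
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)]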
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Traceback (most recent call last):
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] yield resources
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self.driver.spawn(context, instance, image_meta,
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] vm_ref = self.build_virtual_machine(instance,
[ 1938.455018] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] vif_infos = vmwarevif.get_vif_info(self._session,
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] for vif in network_info:
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] return self._sync_wrapper(fn, *args, **kwargs)
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self.wait()
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self[:] = self._gt.wait()
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] return self._exit_event.wait()
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 1938.455392] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] current.throw(*self._exc)
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] result = function(*args, **kwargs)
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] return func(*args, **kwargs)
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] raise e
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] nwinfo = self.network_api.allocate_for_instance(
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] created_port_ids = self._update_ports_for_instance(
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] with excutils.save_and_reraise_exception():
[ 1938.455694] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self.force_reraise()
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] raise self.value
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] updated_port = self._update_port(
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] _ensure_no_port_binding_failure(port)
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] raise exception.PortBindingFailed(port_id=port['id'])
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] nova.exception.PortBindingFailed: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information.
[ 1938.456182] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f]
[ 1938.456182] env[62875]: INFO nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Terminating instance
[ 1938.501999] env[62875]: DEBUG nova.network.neutron [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1938.551576] env[62875]: DEBUG nova.network.neutron [-] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1938.592831] env[62875]: DEBUG nova.network.neutron [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1938.831215] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1938.910252] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1938.959899] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Acquiring lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1938.960114] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Acquired lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1938.960300] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1939.057540] env[62875]: INFO nova.compute.manager [-] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Took 1.03 seconds to deallocate network for instance.
[ 1939.059865] env[62875]: DEBUG nova.compute.claims [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1939.060060] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.095850] env[62875]: DEBUG oslo_concurrency.lockutils [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] Releasing lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1939.096145] env[62875]: DEBUG nova.compute.manager [req-83aa989c-8f53-4005-ba93-e85de5519b94 req-376559a1-7134-4b0e-9d07-55fcac9aa3c5 service nova] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Received event network-vif-deleted-1b726078-d307-4a6d-b485-0a72664ca270 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1939.412646] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Releasing lock "refresh_cache-ec4405a5-de44-4749-9225-3945db05ca6e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1939.412910] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1939.414031] env[62875]: DEBUG nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1939.414031] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1939.432227] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1939.486729] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1939.603482] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1939.679859] env[62875]: DEBUG nova.compute.manager [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Received event network-changed-24684040-75d9-482f-b8e8-74b5e3c5e1eb {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1939.679859] env[62875]: DEBUG nova.compute.manager [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Refreshing instance network info cache due to event network-changed-24684040-75d9-482f-b8e8-74b5e3c5e1eb. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1939.679859] env[62875]: DEBUG oslo_concurrency.lockutils [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] Acquiring lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.842976] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance ec4405a5-de44-4749-9225-3945db05ca6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1939.842976] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7d241bf8-6f7a-467f-9640-a9819d5cca72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1939.842976] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 793da91d-461a-465b-b9a3-c5fa0f5b877d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1939.842976] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 4ee68d78-b265-4ee8-afcc-ce6ed150fb73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1939.843212] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1939.935515] env[62875]: DEBUG nova.network.neutron [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.105970] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Releasing lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.106448] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1940.106648] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1940.106976] env[62875]: DEBUG oslo_concurrency.lockutils [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] Acquired lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.107167] env[62875]: DEBUG nova.network.neutron [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Refreshing network info cache for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1940.108264] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cd68135-acf8-4b2e-94b3-3fdba6581ab9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.121568] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7ba061-79bf-4e0a-87ea-595a0342c514 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.146142] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f could not be found. [ 1940.146392] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1940.146605] env[62875]: INFO nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1940.146851] env[62875]: DEBUG oslo.service.loopingcall [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1940.147093] env[62875]: DEBUG nova.compute.manager [-] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1940.147602] env[62875]: DEBUG nova.network.neutron [-] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1940.165278] env[62875]: DEBUG nova.network.neutron [-] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1940.344889] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 6eb92420-57b1-4a7d-973f-10cd47be0416 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.439176] env[62875]: INFO nova.compute.manager [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] [instance: ec4405a5-de44-4749-9225-3945db05ca6e] Took 1.03 seconds to deallocate network for instance. [ 1940.583275] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "8361611a-ad16-43ef-94e0-f2e7e9851682" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.583535] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.663234] env[62875]: DEBUG nova.network.neutron [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1940.669253] env[62875]: DEBUG nova.network.neutron [-] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.791880] env[62875]: DEBUG nova.network.neutron [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.852685] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a19b7959-df3c-47e8-b920-edfe82c36489 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1941.174040] env[62875]: INFO nova.compute.manager [-] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Took 1.03 seconds to deallocate network for instance. [ 1941.176346] env[62875]: DEBUG nova.compute.claims [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1941.176607] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.292583] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "2106a09b-554e-41dd-aa3a-c190b62d0afc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.292818] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.296589] env[62875]: DEBUG oslo_concurrency.lockutils [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] Releasing lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.296818] env[62875]: DEBUG nova.compute.manager [req-bcff8118-ad9b-4303-bf16-09a7f463d673 req-46fcb0f0-c059-4590-b1f4-c2da946771d0 service nova] [instance: 
c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Received event network-vif-deleted-24684040-75d9-482f-b8e8-74b5e3c5e1eb {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1941.355600] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance e11311ed-6804-4df4-a775-9060463ac927 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1941.468405] env[62875]: INFO nova.scheduler.client.report [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Deleted allocations for instance ec4405a5-de44-4749-9225-3945db05ca6e [ 1941.859115] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d3270b4a-2b81-41f5-a2af-5b7f441e4a2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1941.979912] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b58d850-d308-4fd5-bc7b-85d1c1f2f4c0 tempest-FloatingIPsAssociationTestJSON-1378039021 tempest-FloatingIPsAssociationTestJSON-1378039021-project-member] Lock "ec4405a5-de44-4749-9225-3945db05ca6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 88.511s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.363280] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 782e6663-202b-4ed0-8a1a-cc54f246143b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1942.486058] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1942.865942] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 100c3541-3af3-4d3c-8060-2235f18f51e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1943.007504] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.367836] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 77d57f64-9bab-46f1-87b4-62bac5c5d2bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1943.871070] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 5f190f86-2faa-4b8e-821f-2113577541e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1944.373852] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 6f936641-750d-49ae-8beb-bca35305d10d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1944.876312] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1945.380031] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 41ec8810-3f17-4f59-9828-a4a2e873eab4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1945.883230] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 5cf1f620-d0da-4e81-8d1f-e881c47dcad1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1946.385916] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance fd7ba11a-18d1-4f96-a445-eedce740b0c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1946.889414] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1947.392458] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c6de797f-03f7-4dca-9c6a-e7b840990be6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1947.896313] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance acc78084-21e8-456c-a573-fc5e931147c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1948.399886] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a64253fe-4ba9-4686-810b-a26a4c29631b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1948.903341] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 305aebbe-f983-4826-b8c0-9854458f7d48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1949.406046] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c1e107cd-5c03-405f-bdae-3281dc4844d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1949.909304] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 37ae8e69-f953-4846-8a21-fed697ea575a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1950.412644] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d0c4095f-2d78-4055-b568-7e70e7c4c182 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1950.413083] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1950.413083] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1950.736349] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdf408b-f64d-4fdd-a017-827495de9113 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.744130] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10390b42-efee-45e0-989c-350b2bca1909 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.775505] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37169bf3-5907-4897-a19f-a43ef02f246a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.785273] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdd8112-c009-4695-a455-61879e20d870 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.797184] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.300053] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1951.806062] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1951.806062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.513s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.806062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.671s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.807458] env[62875]: INFO nova.compute.claims [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1952.810262] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.810545] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1952.810635] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1953.109481] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9172a1c-9b67-46d9-8821-8a8a20bb700e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.115294] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ca2dd3-8e2a-435c-8e62-e9edc40bbe33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.145551] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7d71ab-8943-4d81-a95f-aada2dec88d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.153087] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bbbb11-e4fd-4f30-b780-040b11463beb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1953.166076] env[62875]: DEBUG nova.compute.provider_tree [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1953.314186] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1953.314355] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1953.314441] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1953.314572] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1953.314700] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1953.314831] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. 
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1953.315064] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.669321] env[62875]: DEBUG nova.scheduler.client.report [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1954.174457] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.175024] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1954.179020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.114s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.179481] env[62875]: INFO nova.compute.claims [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1954.685573] env[62875]: DEBUG nova.compute.utils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1954.689661] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1954.689901] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1954.741653] env[62875]: DEBUG nova.policy [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f31044c31f04b07a5e4657a6ef015f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67b77a82540742a2a56216fd1d3a990e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1955.015141] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Successfully created port: 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1955.191068] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1955.561185] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174fbd12-7e0a-4fee-a320-5d862741bbc6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.567747] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3426220a-e85d-4de6-81f7-f2cb2dafce0f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.600864] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5990b7-5948-48a1-9f29-2553e7ddebc4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.608760] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cef2da-28ac-4282-b17b-9777c48f1f58 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.624091] env[62875]: DEBUG nova.compute.provider_tree [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1955.810418] env[62875]: DEBUG nova.compute.manager [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Received event network-changed-7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1955.810621] env[62875]: DEBUG nova.compute.manager [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Refreshing instance network info cache due to event network-changed-7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1955.810832] env[62875]: DEBUG oslo_concurrency.lockutils [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] Acquiring lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.810984] env[62875]: DEBUG oslo_concurrency.lockutils [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] Acquired lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.811166] env[62875]: DEBUG nova.network.neutron [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Refreshing network info cache for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1955.956897] env[62875]: ERROR nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information. [ 1955.956897] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1955.956897] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1955.956897] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1955.956897] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1955.956897] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1955.956897] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1955.956897] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1955.956897] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1955.956897] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1955.956897] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1955.956897] env[62875]: ERROR nova.compute.manager raise self.value [ 1955.956897] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1955.956897] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1955.956897] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1955.956897] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1955.957746] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1955.957746] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1955.957746] env[62875]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information. [ 1955.957746] env[62875]: ERROR nova.compute.manager [ 1955.957746] env[62875]: Traceback (most recent call last): [ 1955.957746] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1955.957746] env[62875]: listener.cb(fileno) [ 1955.957746] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1955.957746] env[62875]: result = function(*args, **kwargs) [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1955.957746] env[62875]: return func(*args, **kwargs) [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1955.957746] env[62875]: raise e [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1955.957746] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1955.957746] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1955.957746] env[62875]: with excutils.save_and_reraise_exception(): [ 1955.957746] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1955.957746] env[62875]: self.force_reraise() [ 1955.957746] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1955.957746] env[62875]: raise self.value [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1955.957746] env[62875]: updated_port = self._update_port( [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1955.957746] env[62875]: _ensure_no_port_binding_failure(port) [ 1955.957746] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1955.957746] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1955.959073] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information. 
[ 1955.959073] env[62875]: Removing descriptor: 21
[ 1956.128248] env[62875]: DEBUG nova.scheduler.client.report [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1956.203891] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 1956.230493] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 1956.230742] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1956.230901] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 1956.231100] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1956.231253] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 1956.231400] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 1956.231608] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 1956.231766] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 1956.231930] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 1956.232105] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 1956.232277] env[62875]: DEBUG nova.virt.hardware [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 1956.233140] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df19780-0bca-46c8-be52-388820ce9a32 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1956.241040] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e36518-2075-4aa4-82b2-d1c80438d066 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1956.254701] env[62875]: ERROR nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information.
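The inventory record above fixes the provider's schedulable capacity. For reference, a small sketch of the usual Placement arithmetic, capacity = (total - reserved) * allocation_ratio, applied to the logged values (the formula is standard Placement behavior; it is not spelled out in this log):

```python
# Effective capacity per resource class for provider 2d6e5fad-...,
# computed as (total - reserved) * allocation_ratio.  max_unit still
# caps any single allocation (16 VCPU, 65530 MB, 175 GB here).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```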
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Traceback (most recent call last):
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     yield resources
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     self.driver.spawn(context, instance, image_meta,
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     vm_ref = self.build_virtual_machine(instance,
[ 1956.254701] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     for vif in network_info:
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     return self._sync_wrapper(fn, *args, **kwargs)
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     self.wait()
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     self[:] = self._gt.wait()
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     return self._exit_event.wait()
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 1956.255111] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     current.throw(*self._exc)
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     result = function(*args, **kwargs)
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     return func(*args, **kwargs)
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     raise e
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     nwinfo = self.network_api.allocate_for_instance(
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     created_port_ids = self._update_ports_for_instance(
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     with excutils.save_and_reraise_exception():
[ 1956.255501] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     self.force_reraise()
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     raise self.value
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     updated_port = self._update_port(
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     _ensure_no_port_binding_failure(port)
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]     raise exception.PortBindingFailed(port_id=port['id'])
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] nova.exception.PortBindingFailed: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information.
[ 1956.256107] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416]
[ 1956.256107] env[62875]: INFO nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Terminating instance
[ 1956.328842] env[62875]: DEBUG nova.network.neutron [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1956.403188] env[62875]: DEBUG nova.network.neutron [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1956.633615] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1956.634161] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 1956.637442] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.530s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1956.638905] env[62875]: INFO nova.compute.claims [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1956.759421] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1956.905584] env[62875]: DEBUG oslo_concurrency.lockutils [req-b6644bc0-3400-4ff5-8c82-d2c55f0e3734 req-82f0dc05-661c-4645-88b6-79fc38ea07fd service nova] Releasing lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1956.906306] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquired lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1956.906529] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1957.146346] env[62875]: DEBUG nova.compute.utils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1957.148231] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1957.148476] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1957.194935] env[62875]: DEBUG nova.policy [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e320b30dc2654255b311abd163a9d1c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a7eabc1c26f4984af1f1cac8faa066a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 1957.422983] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1957.486701] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Successfully created port: 05ad56d5-0f49-49e7-ae3f-dc1280a7199d {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1957.520264] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1957.651723] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 1957.912780] env[62875]: DEBUG nova.compute.manager [req-7b0206f2-9f7e-4e8b-8acf-39773342b088 req-1cf38e35-d087-41b8-99f4-5af8a8a8cf75 service nova] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Received event network-vif-deleted-7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1957.998755] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd12466-c377-4d0d-b958-5328e90816df {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.006889] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d781995f-2547-438c-b9f1-99532f6aeede {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.037048] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Releasing lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1958.037584] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1958.038132] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1958.038275] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7598b632-ca66-4318-9aa8-aa26758e3553 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.040490] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d71090c-f115-4546-9204-4901f58586fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.048808] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fafec7-bf81-462f-b53b-61e21b011372 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.054831] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4af89a-d39e-439b-b7ae-6c69ce3679fb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.075281] env[62875]: DEBUG nova.compute.provider_tree [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1958.081194] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6eb92420-57b1-4a7d-973f-10cd47be0416 could not be found.
[ 1958.081446] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1958.081663] env[62875]: INFO nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1958.081932] env[62875]: DEBUG oslo.service.loopingcall [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1958.082394] env[62875]: DEBUG nova.compute.manager [-] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1958.082529] env[62875]: DEBUG nova.network.neutron [-] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1958.103132] env[62875]: DEBUG nova.network.neutron [-] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1958.276411] env[62875]: ERROR nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information.
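"failed network setup after 1 attempt(s)" reflects a retry loop around allocate_for_instance; the attempt count comes from a configurable retry option (network_allocate_retries, whose default of 0 would yield exactly the single attempt seen here; that default is an assumption, not shown in this log). An illustrative sketch of the pattern, not Nova's exact code:

```python
import time


def allocate_with_retries(allocate, retries=0, backoff=1.0):
    # Illustrative retry wrapper in the spirit of _allocate_network_async:
    # retries=0 means a single attempt, matching "after 1 attempt(s)".
    attempts = retries + 1
    for attempt in range(1, attempts + 1):
        try:
            return allocate()
        except Exception:
            if attempt == attempts:
                # Out of attempts: let the error propagate, as in the log.
                raise
            time.sleep(backoff * attempt)  # simple linear backoff
```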
[ 1958.276411] env[62875]: ERROR nova.compute.manager Traceback (most recent call last):
[ 1958.276411] env[62875]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1958.276411] env[62875]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 1958.276411] env[62875]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1958.276411] env[62875]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 1958.276411] env[62875]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1958.276411] env[62875]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 1958.276411] env[62875]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1958.276411] env[62875]: ERROR nova.compute.manager     self.force_reraise()
[ 1958.276411] env[62875]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1958.276411] env[62875]: ERROR nova.compute.manager     raise self.value
[ 1958.276411] env[62875]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1958.276411] env[62875]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 1958.276411] env[62875]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1958.276411] env[62875]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 1958.276961] env[62875]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1958.276961] env[62875]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 1958.276961] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information.
[ 1958.276961] env[62875]: ERROR nova.compute.manager
[ 1958.276961] env[62875]: Traceback (most recent call last):
[ 1958.276961] env[62875]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 1958.276961] env[62875]:     listener.cb(fileno)
[ 1958.276961] env[62875]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1958.276961] env[62875]:     result = function(*args, **kwargs)
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1958.276961] env[62875]:     return func(*args, **kwargs)
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1958.276961] env[62875]:     raise e
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1958.276961] env[62875]:     nwinfo = self.network_api.allocate_for_instance(
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1958.276961] env[62875]:     created_port_ids = self._update_ports_for_instance(
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1958.276961] env[62875]:     with excutils.save_and_reraise_exception():
[ 1958.276961] env[62875]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1958.276961] env[62875]:     self.force_reraise()
[ 1958.276961] env[62875]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1958.276961] env[62875]:     raise self.value
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1958.276961] env[62875]:     updated_port = self._update_port(
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1958.276961] env[62875]:     _ensure_no_port_binding_failure(port)
[ 1958.276961] env[62875]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1958.276961] env[62875]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 1958.277807] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information.
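Every one of these tracebacks passes through oslo_utils.excutils.save_and_reraise_exception, which is why the force_reraise()/raise self.value frames appear: the context manager captures the in-flight exception, lets the block run cleanup, then re-raises the original error on exit. A small usage example (the helper functions are illustrative, not Nova's):

```python
from oslo_utils import excutils


def update_port_or_cleanup(update_port, cleanup, port_id):
    try:
        return update_port(port_id)
    except Exception:
        # The cleanup runs, then the ORIGINAL exception is re-raised on
        # exit from the context manager, even if cleanup succeeds.
        with excutils.save_and_reraise_exception():
            cleanup(port_id)


def _fail(port_id):
    raise RuntimeError(f"binding failed for {port_id}")


try:
    update_port_or_cleanup(_fail, lambda port_id: None, 'example-port')
except RuntimeError as exc:
    print(exc)  # the original error survives the cleanup
```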
[ 1958.277807] env[62875]: Removing descriptor: 21
[ 1958.584158] env[62875]: DEBUG nova.scheduler.client.report [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1958.605135] env[62875]: DEBUG nova.network.neutron [-] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1958.663709] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 1958.688392] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 1958.688733] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1958.689012] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 1958.689228] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1958.689382] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 1958.689528] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 1958.689789] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 1958.689930] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 1958.690126] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 1958.690292] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 1958.690462] env[62875]: DEBUG nova.virt.hardware [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 1958.691397] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b82dbe4-dc06-4f9c-b95e-734c6ab680ec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.700022] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de512756-69ce-4211-b7c9-1aa5cecebe87 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1958.712919] env[62875]: ERROR nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information.
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Traceback (most recent call last):
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     yield resources
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     self.driver.spawn(context, instance, image_meta,
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     vm_ref = self.build_virtual_machine(instance,
[ 1958.712919] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     for vif in network_info:
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     return self._sync_wrapper(fn, *args, **kwargs)
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     self.wait()
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     self[:] = self._gt.wait()
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     return self._exit_event.wait()
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 1958.713372] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     current.throw(*self._exc)
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     result = function(*args, **kwargs)
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     return func(*args, **kwargs)
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     raise e
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     nwinfo = self.network_api.allocate_for_instance(
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     created_port_ids = self._update_ports_for_instance(
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     with excutils.save_and_reraise_exception():
[ 1958.713776] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     self.force_reraise()
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     raise self.value
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     updated_port = self._update_port(
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     _ensure_no_port_binding_failure(port)
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]     raise exception.PortBindingFailed(port_id=port['id'])
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] nova.exception.PortBindingFailed: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information.
[ 1958.714165] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489]
[ 1958.714165] env[62875]: INFO nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Terminating instance
[ 1959.090103] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1959.090103] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 1959.092679] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.009s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1959.094179] env[62875]: INFO nova.compute.claims [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1959.108015] env[62875]: INFO nova.compute.manager [-] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Took 1.03 seconds to deallocate network for instance.
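The "compute_resources" lock records above serialize resource-tracker claims; note the 30.009s wait while other builds held the lock. A minimal oslo.concurrency sketch of the same pattern (the claim body and dict-based usage tracking are illustrative, not Nova's ResourceTracker):

```python
from oslo_concurrency import lockutils


# ResourceTracker-style serialization: every claim and abort takes the
# same named lock, so usage accounting never runs concurrently.  With
# the default in-process lock this covers a single compute service.
@lockutils.synchronized('compute_resources')
def instance_claim(usage, flavor):
    # Illustrative body: test and reserve resources atomically.
    usage['vcpus_used'] += flavor['vcpus']
    usage['memory_mb_used'] += flavor['memory_mb']


usage = {'vcpus_used': 0, 'memory_mb_used': 0}
instance_claim(usage, {'vcpus': 1, 'memory_mb': 192})  # m1.nano from the log
print(usage)
```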
[ 1959.110014] env[62875]: DEBUG nova.compute.claims [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1959.110201] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1959.218804] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Acquiring lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1959.219562] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Acquired lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1959.219562] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1959.603367] env[62875]: DEBUG nova.compute.utils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1959.605547] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1959.605823] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1959.648354] env[62875]: DEBUG nova.policy [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '838ae1184cb3480bae374e1d8cf17426', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '054eca3e32604ac682d521a8951bd4b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 1959.738055] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1959.833194] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1959.939651] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Successfully created port: 06b387d5-481c-4736-9fe2-693ab649f0f5 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1959.944522] env[62875]: DEBUG nova.compute.manager [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Received event network-changed-05ad56d5-0f49-49e7-ae3f-dc1280a7199d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1959.944713] env[62875]: DEBUG nova.compute.manager [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Refreshing instance network info cache due to event network-changed-05ad56d5-0f49-49e7-ae3f-dc1280a7199d. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 1959.944901] env[62875]: DEBUG oslo_concurrency.lockutils [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] Acquiring lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1960.106633] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 1960.335688] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Releasing lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1960.335962] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1960.336171] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1960.336477] env[62875]: DEBUG oslo_concurrency.lockutils [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] Acquired lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1960.336652] env[62875]: DEBUG nova.network.neutron [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Refreshing network info cache for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1960.337723] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5cad1bc7-66b8-4fab-8b28-4abeab8790c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1960.351525] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de5d79d-f456-4e78-bb5b-f00248e8c162 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1960.378028] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a19b7959-df3c-47e8-b920-edfe82c36489 could not be found.
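The WARNING above shows the destroy path tolerating a VM that never reached vCenter: the missing backend object is treated as already destroyed so network and claim cleanup can proceed. A hedged sketch of that tolerance (the exception class and driver are simplified stand-ins, not Nova's actual types):

```python
import logging

logging.basicConfig(level=logging.WARNING)
LOG = logging.getLogger('sketch.vmops')


class InstanceNotFound(Exception):
    """Simplified stand-in for nova.exception.InstanceNotFound."""


class MissingBackendDriver:
    """Illustrative driver whose VM never made it onto the backend."""

    def destroy(self, instance_uuid):
        raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


def destroy_instance(driver, instance_uuid):
    try:
        driver.destroy(instance_uuid)
    except InstanceNotFound as exc:
        # The build failed before a VM was created on the backend, so
        # there is nothing to delete: log and continue with network and
        # claim cleanup, as the WARNING above does.
        LOG.warning("Instance does not exist on backend: %s", exc)


destroy_instance(MissingBackendDriver(),
                 'a19b7959-df3c-47e8-b920-edfe82c36489')
```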
[ 1960.378259] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1960.379279] env[62875]: INFO nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1960.379279] env[62875]: DEBUG oslo.service.loopingcall [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.381807] env[62875]: DEBUG nova.compute.manager [-] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1960.381916] env[62875]: DEBUG nova.network.neutron [-] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1960.407313] env[62875]: DEBUG nova.network.neutron [-] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1960.468088] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14c417b-fece-4951-ab75-ba7ffc2f18d4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.475771] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b069af92-fba1-453e-81fc-dac23da7630a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.509458] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ebaf62-c8b2-4e1f-9b97-cf468a3aa0ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.516850] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f31c9a-d149-4617-9eab-4d5bc563f80c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.530073] env[62875]: DEBUG nova.compute.provider_tree [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1960.763995] env[62875]: ERROR nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 
06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. [ 1960.763995] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1960.763995] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1960.763995] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1960.763995] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1960.763995] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1960.763995] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1960.763995] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1960.763995] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1960.763995] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1960.763995] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1960.763995] env[62875]: ERROR nova.compute.manager raise self.value [ 1960.763995] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1960.763995] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1960.763995] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1960.763995] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1960.764954] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1960.764954] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1960.764954] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. 
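The frames above end in nova/network/neutron.py's _ensure_no_port_binding_failure(). A minimal reconstruction, inferred from the traceback rather than quoted from the Nova tree: Neutron marks a failed binding by setting the port's binding:vif_type to "binding_failed", and Nova converts that into the PortBindingFailed raised here.

    from nova import exception

    def _ensure_no_port_binding_failure(port):
        # "binding_failed" is the sentinel vif_type Neutron reports when
        # it could not bind the port to any host.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])
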
[ 1960.764954] env[62875]: ERROR nova.compute.manager [ 1960.764954] env[62875]: Traceback (most recent call last): [ 1960.764954] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1960.764954] env[62875]: listener.cb(fileno) [ 1960.764954] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1960.764954] env[62875]: result = function(*args, **kwargs) [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1960.764954] env[62875]: return func(*args, **kwargs) [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1960.764954] env[62875]: raise e [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1960.764954] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1960.764954] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1960.764954] env[62875]: with excutils.save_and_reraise_exception(): [ 1960.764954] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1960.764954] env[62875]: self.force_reraise() [ 1960.764954] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1960.764954] env[62875]: raise self.value [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1960.764954] env[62875]: updated_port = self._update_port( [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1960.764954] env[62875]: _ensure_no_port_binding_failure(port) [ 1960.764954] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1960.764954] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1960.765845] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. [ 1960.765845] env[62875]: Removing descriptor: 21 [ 1960.857788] env[62875]: DEBUG nova.network.neutron [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1960.910067] env[62875]: DEBUG nova.network.neutron [-] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.928269] env[62875]: DEBUG nova.network.neutron [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.033241] env[62875]: DEBUG nova.scheduler.client.report [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1961.117245] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1961.142268] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1961.142504] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1961.142665] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1961.142845] env[62875]: DEBUG nova.virt.hardware [None 
req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1961.142992] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1961.143157] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1961.143361] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1961.143521] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1961.143682] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1961.143841] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1961.144025] env[62875]: DEBUG nova.virt.hardware [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1961.144883] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6e6071-674e-4d6f-8d78-a2eaad3c70de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.152977] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf652c85-20e2-41dc-b613-1b0949dc0d01 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.168049] env[62875]: ERROR nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 
06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] Traceback (most recent call last): [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] yield resources [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self.driver.spawn(context, instance, image_meta, [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] vm_ref = self.build_virtual_machine(instance, [ 1961.168049] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] vif_infos = vmwarevif.get_vif_info(self._session, [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] for vif in network_info: [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] return self._sync_wrapper(fn, *args, **kwargs) [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self.wait() [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self[:] = self._gt.wait() [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] return self._exit_event.wait() [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1961.168471] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] current.throw(*self._exc) [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] result = function(*args, **kwargs) [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] return func(*args, **kwargs) [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] raise e [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] nwinfo = self.network_api.allocate_for_instance( [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] created_port_ids = self._update_ports_for_instance( [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] with excutils.save_and_reraise_exception(): [ 1961.168933] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self.force_reraise() [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] raise self.value [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] updated_port = self._update_port( [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] _ensure_no_port_binding_failure(port) [ 1961.169352] env[62875]: ERROR nova.compute.manager 
[instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] raise exception.PortBindingFailed(port_id=port['id']) [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] nova.exception.PortBindingFailed: Binding failed for port 06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. [ 1961.169352] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] [ 1961.169352] env[62875]: INFO nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Terminating instance [ 1961.412679] env[62875]: INFO nova.compute.manager [-] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Took 1.03 seconds to deallocate network for instance. [ 1961.415127] env[62875]: DEBUG nova.compute.claims [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1961.415315] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.431023] env[62875]: DEBUG oslo_concurrency.lockutils [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] Releasing lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.431289] env[62875]: DEBUG nova.compute.manager [req-c61be0cf-3b84-408b-acf7-4cd32db644eb req-9779532e-ed51-46c9-a6b9-94841fc6aaf3 service nova] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Received event network-vif-deleted-05ad56d5-0f49-49e7-ae3f-dc1280a7199d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1961.538013] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.538588] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1961.541428] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.530s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.542854] env[62875]: INFO nova.compute.claims [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1961.673240] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Acquiring lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.673632] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Acquired lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.673924] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1961.972572] env[62875]: DEBUG nova.compute.manager [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] [instance: e11311ed-6804-4df4-a775-9060463ac927] Received event network-changed-06b387d5-481c-4736-9fe2-693ab649f0f5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1961.972894] env[62875]: DEBUG nova.compute.manager [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] [instance: e11311ed-6804-4df4-a775-9060463ac927] Refreshing instance network info cache due to event network-changed-06b387d5-481c-4736-9fe2-693ab649f0f5. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1961.972972] env[62875]: DEBUG oslo_concurrency.lockutils [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] Acquiring lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1962.046730] env[62875]: DEBUG nova.compute.utils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1962.050016] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1962.050198] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1962.096340] env[62875]: DEBUG nova.policy [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38b8367ae96345c0b1fefa9911ac918a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a544fbea33864c838b89eec626937d38', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1962.197057] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1962.284312] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.367323] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Successfully created port: 62c504ab-9c7d-4989-8611-c9cad1eb9826 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1962.551384] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1962.788642] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Releasing lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.790729] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1962.790729] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1962.791874] env[62875]: DEBUG oslo_concurrency.lockutils [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] Acquired lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.792118] env[62875]: DEBUG nova.network.neutron [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] [instance: e11311ed-6804-4df4-a775-9060463ac927] Refreshing network info cache for port 06b387d5-481c-4736-9fe2-693ab649f0f5 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1962.793427] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c0d6cd6-56a5-4c8a-b6f2-1fd10ff9da39 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.804437] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7ab177-dc9c-4e87-a18a-7d50922f9c44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.831986] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e11311ed-6804-4df4-a775-9060463ac927 could not be found. [ 1962.832226] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1962.832475] env[62875]: INFO nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1962.832719] env[62875]: DEBUG oslo.service.loopingcall [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1962.835119] env[62875]: DEBUG nova.compute.manager [-] [instance: e11311ed-6804-4df4-a775-9060463ac927] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1962.835229] env[62875]: DEBUG nova.network.neutron [-] [instance: e11311ed-6804-4df4-a775-9060463ac927] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1962.877913] env[62875]: DEBUG nova.network.neutron [-] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1963.009734] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83059ff-21c0-43f7-8dda-5a3bf8e73120 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.017176] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b7496e-c7af-4e23-921e-c7617a759beb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.048122] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18f7dba-cb82-4369-bf07-e95795db1d99 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.055421] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8904eb1a-3f7a-45d8-b4cd-6d3d40f49dc7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.072392] env[62875]: DEBUG nova.compute.provider_tree [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.181628] env[62875]: ERROR nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. 
[ 1963.181628] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1963.181628] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1963.181628] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1963.181628] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1963.181628] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1963.181628] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1963.181628] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1963.181628] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1963.181628] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1963.181628] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1963.181628] env[62875]: ERROR nova.compute.manager raise self.value [ 1963.181628] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1963.181628] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1963.181628] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1963.181628] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1963.182242] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1963.182242] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1963.182242] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. 
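Both tracebacks above show the same oslo.utils idiom (the __exit__ / force_reraise() / raise self.value frames): save_and_reraise_exception() lets cleanup run and then re-raises the original exception when its with-block exits. A hedged sketch of the pattern; update_ports, update_port, and ports are hypothetical stand-ins, not Nova code:

    from oslo_utils import excutils

    def update_ports(update_port, ports):
        for port in ports:
            try:
                update_port(port)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Roll back partial work here; the original exception
                    # (the PortBindingFailed above) propagates on exit.
                    pass
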
[ 1963.182242] env[62875]: ERROR nova.compute.manager [ 1963.182242] env[62875]: Traceback (most recent call last): [ 1963.182242] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1963.182242] env[62875]: listener.cb(fileno) [ 1963.182242] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1963.182242] env[62875]: result = function(*args, **kwargs) [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1963.182242] env[62875]: return func(*args, **kwargs) [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1963.182242] env[62875]: raise e [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1963.182242] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1963.182242] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1963.182242] env[62875]: with excutils.save_and_reraise_exception(): [ 1963.182242] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1963.182242] env[62875]: self.force_reraise() [ 1963.182242] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1963.182242] env[62875]: raise self.value [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1963.182242] env[62875]: updated_port = self._update_port( [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1963.182242] env[62875]: _ensure_no_port_binding_failure(port) [ 1963.182242] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1963.182242] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1963.183369] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. [ 1963.183369] env[62875]: Removing descriptor: 21 [ 1963.312719] env[62875]: DEBUG nova.network.neutron [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1963.379789] env[62875]: DEBUG nova.network.neutron [-] [instance: e11311ed-6804-4df4-a775-9060463ac927] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.383425] env[62875]: DEBUG nova.network.neutron [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] [instance: e11311ed-6804-4df4-a775-9060463ac927] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.565319] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1963.577331] env[62875]: DEBUG nova.scheduler.client.report [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1963.592016] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1963.592270] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1963.592429] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1963.592616] env[62875]: DEBUG nova.virt.hardware [None 
req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1963.592773] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1963.592916] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1963.593128] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1963.593283] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1963.593444] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1963.593600] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1963.593767] env[62875]: DEBUG nova.virt.hardware [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1963.594632] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb18adf-1e9a-45f9-b3ae-498843e37a40 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.603365] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59af71e8-d3fc-4834-8b35-78f1d0e3cc66 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.617300] env[62875]: ERROR nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. 
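The "Build topologies"/"Possible topologies" records above reduce to a simple search. A small sketch (not the Nova source) of why a 1-vCPU flavor such as m1.nano, with no flavor or image limits, yields exactly one candidate: the only factorization of 1 into sockets * cores * threads under the 65536 caps is 1:1:1.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every (sockets, cores, threads) triple whose product
        # equals the vCPU count, within the configured maxima.
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    assert possible_topologies(1) == [(1, 1, 1)]
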
[ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Traceback (most recent call last): [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] yield resources [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self.driver.spawn(context, instance, image_meta, [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] vm_ref = self.build_virtual_machine(instance, [ 1963.617300] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] vif_infos = vmwarevif.get_vif_info(self._session, [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] for vif in network_info: [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] return self._sync_wrapper(fn, *args, **kwargs) [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self.wait() [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self[:] = self._gt.wait() [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] return self._exit_event.wait() [ 1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1963.617713] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] current.throw(*self._exc) [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] result = function(*args, **kwargs) [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] return func(*args, **kwargs) [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] raise e [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] nwinfo = self.network_api.allocate_for_instance( [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] created_port_ids = self._update_ports_for_instance( [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] with excutils.save_and_reraise_exception(): [ 1963.618210] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self.force_reraise() [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] raise self.value [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] updated_port = self._update_port( [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] _ensure_no_port_binding_failure(port) [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] raise exception.PortBindingFailed(port_id=port['id']) [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] nova.exception.PortBindingFailed: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. [ 1963.618620] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] [ 1963.618620] env[62875]: INFO nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Terminating instance [ 1963.882864] env[62875]: INFO nova.compute.manager [-] [instance: e11311ed-6804-4df4-a775-9060463ac927] Took 1.05 seconds to deallocate network for instance. [ 1963.885252] env[62875]: DEBUG nova.compute.claims [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1963.885435] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.886977] env[62875]: DEBUG oslo_concurrency.lockutils [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] Releasing lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.887233] env[62875]: DEBUG nova.compute.manager [req-c281aead-7bd8-41d5-8be9-ddeb9e055249 req-82a4f278-fb40-4c25-b190-a509316a7491 service nova] [instance: e11311ed-6804-4df4-a775-9060463ac927] Received event network-vif-deleted-06b387d5-481c-4736-9fe2-693ab649f0f5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1964.013860] env[62875]: DEBUG nova.compute.manager [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Received event network-changed-62c504ab-9c7d-4989-8611-c9cad1eb9826 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1964.014131] env[62875]: DEBUG nova.compute.manager [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Refreshing instance network info cache due to event network-changed-62c504ab-9c7d-4989-8611-c9cad1eb9826. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1964.014377] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] Acquiring lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.014572] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] Acquired lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.014778] env[62875]: DEBUG nova.network.neutron [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Refreshing network info cache for port 62c504ab-9c7d-4989-8611-c9cad1eb9826 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1964.082369] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.082886] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1964.086052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.346s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.121614] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Acquiring lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.533585] env[62875]: DEBUG nova.network.neutron [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1964.587637] env[62875]: DEBUG nova.compute.utils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1964.589057] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1964.589259] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1964.601377] env[62875]: DEBUG nova.network.neutron [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1964.631150] env[62875]: DEBUG nova.policy [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86e44a4e203b49e09a8f9d2bb45b8079', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95d0f81815ea467cbc1c6160e27409fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1964.909096] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39abb52-4024-4052-b0d0-9ea3f64f79f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.916737] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13603ef-f7f0-4a7d-97d8-dcbd52616ec4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.948715] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b75eb1-5abe-4b00-9cf6-16bfe13e7f67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.956573] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa249d7-e2f5-4bb6-bc04-70027a818914 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.970168] env[62875]: DEBUG nova.compute.provider_tree [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in 
ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1965.077096] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Successfully created port: 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1965.094849] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1965.105622] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] Releasing lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.105622] env[62875]: DEBUG nova.compute.manager [req-f0e55c44-2e7d-45ed-a202-ba7e9cd9ce02 req-4e5f8e92-dac8-4ee6-a107-48b4e4b11f28 service nova] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Received event network-vif-deleted-62c504ab-9c7d-4989-8611-c9cad1eb9826 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1965.105622] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Acquired lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.105622] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1965.473616] env[62875]: DEBUG nova.scheduler.client.report [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1965.624852] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1965.704887] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.706171] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.706573] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.706790] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.706961] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.707114] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1965.931563] env[62875]: DEBUG nova.compute.manager [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Received event network-changed-5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1965.931768] env[62875]: DEBUG nova.compute.manager [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Refreshing instance network info cache due to event network-changed-5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1965.931990] env[62875]: DEBUG oslo_concurrency.lockutils [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] Acquiring lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.932155] env[62875]: DEBUG oslo_concurrency.lockutils [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] Acquired lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.932310] env[62875]: DEBUG nova.network.neutron [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Refreshing network info cache for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1965.982391] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.896s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.984866] env[62875]: ERROR nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. 
[ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Traceback (most recent call last): [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self.driver.spawn(context, instance, image_meta, [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] vm_ref = self.build_virtual_machine(instance, [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] vif_infos = vmwarevif.get_vif_info(self._session, [ 1965.984866] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] for vif in network_info: [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return self._sync_wrapper(fn, *args, **kwargs) [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self.wait() [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self[:] = self._gt.wait() [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return self._exit_event.wait() [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] result = hub.switch() [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1965.985314] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return self.greenlet.switch() [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] result = function(*args, **kwargs) [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] return func(*args, **kwargs) [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] raise e [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] nwinfo = self.network_api.allocate_for_instance( [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] created_port_ids = self._update_ports_for_instance( [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] with excutils.save_and_reraise_exception(): [ 1965.985681] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] self.force_reraise() [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] raise self.value [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] updated_port = self._update_port( [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] _ensure_no_port_binding_failure(port) [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] raise exception.PortBindingFailed(port_id=port['id']) [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] nova.exception.PortBindingFailed: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. [ 1965.986046] env[62875]: ERROR nova.compute.manager [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] [ 1965.986370] env[62875]: DEBUG nova.compute.utils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1965.986370] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.481s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.988077] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Build of instance 7d241bf8-6f7a-467f-9640-a9819d5cca72 was re-scheduled: Binding failed for port 3f7cb73d-7dd7-4e15-aba1-71954a9c8e6d, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1965.989636] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1965.989636] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.989636] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.989636] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1966.109284] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1966.134230] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1966.134588] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1966.134809] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1966.135067] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1966.135309] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1966.135946] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1966.135946] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1966.135946] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1966.136562] env[62875]: DEBUG 
nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1966.137482] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1966.137693] env[62875]: DEBUG nova.virt.hardware [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1966.138548] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9241bfe-c34d-41c0-bf28-d5d9673a9ca9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.141900] env[62875]: ERROR nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. [ 1966.141900] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1966.141900] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1966.141900] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1966.141900] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1966.141900] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1966.141900] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1966.141900] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1966.141900] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1966.141900] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1966.141900] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1966.141900] env[62875]: ERROR nova.compute.manager raise self.value [ 1966.141900] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1966.141900] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1966.141900] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1966.141900] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1966.142344] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
1966.142344] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1966.142344] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. [ 1966.142344] env[62875]: ERROR nova.compute.manager [ 1966.142344] env[62875]: Traceback (most recent call last): [ 1966.142344] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1966.142344] env[62875]: listener.cb(fileno) [ 1966.142344] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1966.142344] env[62875]: result = function(*args, **kwargs) [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1966.142344] env[62875]: return func(*args, **kwargs) [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1966.142344] env[62875]: raise e [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1966.142344] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1966.142344] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1966.142344] env[62875]: with excutils.save_and_reraise_exception(): [ 1966.142344] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1966.142344] env[62875]: self.force_reraise() [ 1966.142344] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1966.142344] env[62875]: raise self.value [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1966.142344] env[62875]: updated_port = self._update_port( [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1966.142344] env[62875]: _ensure_no_port_binding_failure(port) [ 1966.142344] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1966.142344] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1966.143214] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. [ 1966.143214] env[62875]: Removing descriptor: 18 [ 1966.147516] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37530f4-5767-4238-903b-2e39acbd8d52 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.163085] env[62875]: ERROR nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. 
[ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Traceback (most recent call last): [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] yield resources [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self.driver.spawn(context, instance, image_meta, [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] vm_ref = self.build_virtual_machine(instance, [ 1966.163085] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] vif_infos = vmwarevif.get_vif_info(self._session, [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] for vif in network_info: [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] return self._sync_wrapper(fn, *args, **kwargs) [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self.wait() [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self[:] = self._gt.wait() [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] return self._exit_event.wait() [ 1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1966.163514] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] current.throw(*self._exc) [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] result = function(*args, **kwargs) [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] return func(*args, **kwargs) [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] raise e [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] nwinfo = self.network_api.allocate_for_instance( [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] created_port_ids = self._update_ports_for_instance( [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] with excutils.save_and_reraise_exception(): [ 1966.163849] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self.force_reraise() [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] raise self.value [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] updated_port = self._update_port( [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] _ensure_no_port_binding_failure(port) [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] raise exception.PortBindingFailed(port_id=port['id']) [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] nova.exception.PortBindingFailed: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. [ 1966.164214] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] [ 1966.164214] env[62875]: INFO nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Terminating instance [ 1966.207190] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Releasing lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.207615] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1966.207802] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1966.208097] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7df3acb-026d-479b-b3c2-043d057149ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.216969] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec0a341-f62e-4085-973c-22b30875fd2f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.238985] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d3270b4a-2b81-41f5-a2af-5b7f441e4a2d could not be found. [ 1966.238985] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1966.239140] env[62875]: INFO nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Took 0.03 seconds to destroy the instance on the hypervisor. 
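Every failed build in this excerpt dies the same way: Neutron accepts the port create, the port comes back with a failed binding, and the check at nova/network/neutron.py line 294 (visible in each traceback as _ensure_no_port_binding_failure(port)) turns it into nova.exception.PortBindingFailed, which unwinds _allocate_network_async and triggers the teardown logged above. A minimal sketch of that check, assuming a failed binding is signalled by the port's 'binding:vif_type' attribute being set to 'binding_failed' (the attribute value itself never appears in this log):

    # Sketch of the check raised at nova/network/neutron.py:294 in the
    # tracebacks above. Assumption: Neutron marks a failed binding by
    # setting the port's 'binding:vif_type' to 'binding_failed'.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'binding:vif_type' is filled in by Neutron during port binding.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # e.g. _ensure_no_port_binding_failure(
    #     {'id': '62c504ab-9c7d-4989-8611-c9cad1eb9826',
    #      'binding:vif_type': 'binding_failed'})

Because the exception is raised while the VMware driver iterates network_info in get_vif_info, the build aborts before any VM exists on the backend, which is why the destroy step just above logs "Instance does not exist on backend" and finishes in 0.03 seconds.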
[ 1966.239272] env[62875]: DEBUG oslo.service.loopingcall [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.239816] env[62875]: DEBUG nova.compute.manager [-] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1966.239929] env[62875]: DEBUG nova.network.neutron [-] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1966.255013] env[62875]: DEBUG nova.network.neutron [-] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1966.450509] env[62875]: DEBUG nova.network.neutron [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1966.507421] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1966.538841] env[62875]: DEBUG nova.network.neutron [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.591496] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.666993] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1966.757710] env[62875]: DEBUG nova.network.neutron [-] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.794538] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d75e197-d4ef-4b24-bce6-52c69bb79a59 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.802398] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-3e48a694-6d97-4fce-9654-1ef5eef9e092 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.832295] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a52185-5482-4a28-8141-b722ef443a9b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.839166] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79bf26e3-09f1-49d1-af7a-b20a139b591e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.851868] env[62875]: DEBUG nova.compute.provider_tree [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1967.041817] env[62875]: DEBUG oslo_concurrency.lockutils [req-79eb0bf0-4ecf-4a39-8abd-b5d69aa4936d req-50a8d90f-cece-40a8-83e4-2ec7b5fbabef service nova] Releasing lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.042288] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.042483] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1967.096218] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-7d241bf8-6f7a-467f-9640-a9819d5cca72" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.096476] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1967.096662] env[62875]: DEBUG nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1967.096830] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1967.112661] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1967.261032] env[62875]: INFO nova.compute.manager [-] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Took 1.02 seconds to deallocate network for instance. [ 1967.263437] env[62875]: DEBUG nova.compute.claims [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1967.263437] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.354643] env[62875]: DEBUG nova.scheduler.client.report [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1967.561241] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1967.617574] env[62875]: DEBUG nova.network.neutron [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.638116] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.859952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.875s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.860618] env[62875]: ERROR nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Traceback (most recent call last): [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self.driver.spawn(context, instance, image_meta, [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] vm_ref = self.build_virtual_machine(instance, [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] vif_infos = vmwarevif.get_vif_info(self._session, [ 1967.860618] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] for vif in network_info: [ 
1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return self._sync_wrapper(fn, *args, **kwargs) [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self.wait() [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self[:] = self._gt.wait() [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return self._exit_event.wait() [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] result = hub.switch() [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1967.860979] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return self.greenlet.switch() [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] result = function(*args, **kwargs) [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] return func(*args, **kwargs) [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] raise e [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] nwinfo = self.network_api.allocate_for_instance( [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] created_port_ids = self._update_ports_for_instance( [ 1967.861369] env[62875]: 
ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] with excutils.save_and_reraise_exception(): [ 1967.861369] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] self.force_reraise() [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] raise self.value [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] updated_port = self._update_port( [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] _ensure_no_port_binding_failure(port) [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] raise exception.PortBindingFailed(port_id=port['id']) [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] nova.exception.PortBindingFailed: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. [ 1967.861739] env[62875]: ERROR nova.compute.manager [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] [ 1967.862075] env[62875]: DEBUG nova.compute.utils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. 
{{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1967.862485] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.802s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.865256] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Build of instance 793da91d-461a-465b-b9a3-c5fa0f5b877d was re-scheduled: Binding failed for port 0d3882f3-fd7b-43e1-a099-329e33bc3f23, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1967.865674] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1967.865915] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquiring lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.866077] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Acquired lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.866239] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1967.989143] env[62875]: DEBUG nova.compute.manager [req-a21a02e7-0e33-4b4e-9bc5-b8e1a09335f2 req-357f8262-e2a1-4bb5-b8be-53bb3a427151 service nova] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Received event network-vif-deleted-5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1968.120060] env[62875]: INFO nova.compute.manager [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 7d241bf8-6f7a-467f-9640-a9819d5cca72] Took 1.02 seconds to deallocate network for instance. 
[ 1968.140763] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.140763] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1968.140962] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1968.141524] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f86d489-e15a-4584-a881-daeeda158b99 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.149916] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae8b0bd-928d-4606-a158-d50fe0efb407 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.172496] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 782e6663-202b-4ed0-8a1a-cc54f246143b could not be found. [ 1968.172709] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1968.172889] env[62875]: INFO nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1968.173150] env[62875]: DEBUG oslo.service.loopingcall [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1968.173366] env[62875]: DEBUG nova.compute.manager [-] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1968.173461] env[62875]: DEBUG nova.network.neutron [-] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1968.189600] env[62875]: DEBUG nova.network.neutron [-] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1968.388964] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1968.442221] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.662714] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b68593d-0cef-49b6-8dcd-d74c223266c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.671485] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d770301-87c1-48c9-93a0-a1aa675ea7a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.703034] env[62875]: DEBUG nova.network.neutron [-] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.704470] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56be8b5b-6b5a-42bd-b884-349cb848afec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.707926] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1968.713529] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95ba424-1df0-4f1e-bc51-14e73d344ce2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.728714] env[62875]: DEBUG nova.compute.provider_tree [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.944821] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Releasing lock "refresh_cache-793da91d-461a-465b-b9a3-c5fa0f5b877d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.945099] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1968.945272] env[62875]: DEBUG nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1968.945444] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1968.961096] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1969.145963] env[62875]: INFO nova.scheduler.client.report [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted allocations for instance 7d241bf8-6f7a-467f-9640-a9819d5cca72 [ 1969.208919] env[62875]: INFO nova.compute.manager [-] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Took 1.04 seconds to deallocate network for instance. 
[ 1969.211271] env[62875]: DEBUG nova.compute.claims [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1969.211409] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.231539] env[62875]: DEBUG nova.scheduler.client.report [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1969.464899] env[62875]: DEBUG nova.network.neutron [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.654687] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0aec3946-8d3f-41c1-8763-49344ddc1f9e tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "7d241bf8-6f7a-467f-9640-a9819d5cca72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 114.749s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.735995] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.873s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.736694] env[62875]: ERROR nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. 
[ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Traceback (most recent call last): [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self.driver.spawn(context, instance, image_meta, [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] vm_ref = self.build_virtual_machine(instance, [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] vif_infos = vmwarevif.get_vif_info(self._session, [ 1969.736694] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] for vif in network_info: [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return self._sync_wrapper(fn, *args, **kwargs) [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self.wait() [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self[:] = self._gt.wait() [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return self._exit_event.wait() [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] result = hub.switch() [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 1969.737051] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return self.greenlet.switch() [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] result = function(*args, **kwargs) [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] return func(*args, **kwargs) [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] raise e [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] nwinfo = self.network_api.allocate_for_instance( [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] created_port_ids = self._update_ports_for_instance( [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] with excutils.save_and_reraise_exception(): [ 1969.737495] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] self.force_reraise() [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] raise self.value [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] updated_port = self._update_port( [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] _ensure_no_port_binding_failure(port) [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] raise exception.PortBindingFailed(port_id=port['id']) [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] nova.exception.PortBindingFailed: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. [ 1969.737813] env[62875]: ERROR nova.compute.manager [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] [ 1969.738094] env[62875]: DEBUG nova.compute.utils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1969.738539] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.562s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.741456] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Build of instance 4ee68d78-b265-4ee8-afcc-ce6ed150fb73 was re-scheduled: Binding failed for port 1b726078-d307-4a6d-b485-0a72664ca270, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1969.741879] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1969.742113] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.742261] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquired lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.742469] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1969.967414] env[62875]: INFO nova.compute.manager [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] [instance: 793da91d-461a-465b-b9a3-c5fa0f5b877d] Took 1.02 seconds to deallocate network for instance. [ 1970.159914] env[62875]: DEBUG nova.compute.manager [None req-bf9872dc-26c3-43f6-9c0f-2d9857ce6aff tempest-ServersListShow296Test-242798754 tempest-ServersListShow296Test-242798754-project-member] [instance: 7d90a82f-3ee1-40c5-b351-87145b7b567c] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1970.264079] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1970.359018] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.661562] env[62875]: DEBUG nova.compute.manager [None req-bf9872dc-26c3-43f6-9c0f-2d9857ce6aff tempest-ServersListShow296Test-242798754 tempest-ServersListShow296Test-242798754-project-member] [instance: 7d90a82f-3ee1-40c5-b351-87145b7b567c] Instance disappeared before build. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1970.689046] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44103c6-5f4d-4b5b-864c-ea15b09d6f60 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.697130] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc89df3-5aab-4d3f-9702-ae1e2c52d9f1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.734901] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0378b506-53e4-4df9-b923-007078e85391 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.743557] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9adf995-3e40-48b0-a60a-f6f4bb921aad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.757265] env[62875]: DEBUG nova.compute.provider_tree [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1970.861700] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Releasing lock "refresh_cache-4ee68d78-b265-4ee8-afcc-ce6ed150fb73" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.861700] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1970.861700] env[62875]: DEBUG nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1970.861700] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1970.876859] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1970.999405] env[62875]: INFO nova.scheduler.client.report [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Deleted allocations for instance 793da91d-461a-465b-b9a3-c5fa0f5b877d [ 1971.175861] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bf9872dc-26c3-43f6-9c0f-2d9857ce6aff tempest-ServersListShow296Test-242798754 tempest-ServersListShow296Test-242798754-project-member] Lock "7d90a82f-3ee1-40c5-b351-87145b7b567c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 99.170s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.261044] env[62875]: DEBUG nova.scheduler.client.report [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1971.379649] env[62875]: DEBUG nova.network.neutron [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1971.507546] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a0b7bfd6-ac5c-49a4-9c81-272c7106bf46 tempest-DeleteServersAdminTestJSON-309738948 tempest-DeleteServersAdminTestJSON-309738948-project-member] Lock "793da91d-461a-465b-b9a3-c5fa0f5b877d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 113.542s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.680536] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1971.706409] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1971.707026] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1971.707026] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1971.767421] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.029s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.768082] env[62875]: ERROR nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information. [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Traceback (most recent call last): [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self.driver.spawn(context, instance, image_meta, [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] vm_ref = self.build_virtual_machine(instance, [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] vif_infos = vmwarevif.get_vif_info(self._session, [ 1971.768082] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] for vif in network_info: 
[ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] return self._sync_wrapper(fn, *args, **kwargs) [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self.wait() [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self[:] = self._gt.wait() [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] return self._exit_event.wait() [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] current.throw(*self._exc) [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1971.768391] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] result = function(*args, **kwargs) [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] return func(*args, **kwargs) [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] raise e [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] nwinfo = self.network_api.allocate_for_instance( [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] created_port_ids = self._update_ports_for_instance( [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] with excutils.save_and_reraise_exception(): [ 1971.768774] 
env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] self.force_reraise() [ 1971.768774] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] raise self.value [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] updated_port = self._update_port( [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] _ensure_no_port_binding_failure(port) [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] raise exception.PortBindingFailed(port_id=port['id']) [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] nova.exception.PortBindingFailed: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information. [ 1971.769235] env[62875]: ERROR nova.compute.manager [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] [ 1971.769235] env[62875]: DEBUG nova.compute.utils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information. 
{{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1971.770319] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.763s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.771900] env[62875]: INFO nova.compute.claims [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1971.775461] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Build of instance c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f was re-scheduled: Binding failed for port 24684040-75d9-482f-b8e8-74b5e3c5e1eb, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1971.775942] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1971.776220] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Acquiring lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.776298] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Acquired lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.776463] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1971.882471] env[62875]: INFO nova.compute.manager [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 4ee68d78-b265-4ee8-afcc-ce6ed150fb73] Took 1.02 seconds to deallocate network for instance. [ 1972.010620] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1972.191403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "816e0ecb-6476-49bb-9fea-a01067f25b51" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.191670] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.202532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.211641] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1972.211755] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1972.211991] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: e11311ed-6804-4df4-a775-9060463ac927] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1972.212069] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1972.212142] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1972.212268] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. 
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1972.212455] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1972.212616] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1972.298468] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1972.381615] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1972.531804] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.715712] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.883819] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Releasing lock "refresh_cache-c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.884514] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1972.884514] env[62875]: DEBUG nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1972.884514] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1972.906304] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1972.916020] env[62875]: INFO nova.scheduler.client.report [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Deleted allocations for instance 4ee68d78-b265-4ee8-afcc-ce6ed150fb73 [ 1973.127588] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d394c295-e96a-49df-9576-3bb438fa62c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.135415] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59aa470f-a9aa-4455-9f2f-200bd42ec1be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.165885] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cfbeee-be3b-43c4-a5d3-a05ac4398581 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.172484] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29285b9d-e832-4176-bca3-d3e90459583e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.186801] env[62875]: DEBUG nova.compute.provider_tree [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1973.409022] env[62875]: DEBUG nova.network.neutron [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.426420] env[62875]: DEBUG oslo_concurrency.lockutils [None req-41fe125a-381f-4cca-9a99-13384c4cbb44 tempest-ServersAdminTestJSON-841127122 
tempest-ServersAdminTestJSON-841127122-project-member] Lock "4ee68d78-b265-4ee8-afcc-ce6ed150fb73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.042s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.689989] env[62875]: DEBUG nova.scheduler.client.report [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1973.912269] env[62875]: INFO nova.compute.manager [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] [instance: c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f] Took 1.03 seconds to deallocate network for instance. [ 1973.929279] env[62875]: DEBUG nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1974.195307] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.195855] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1974.198933] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.089s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.455010] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.703680] env[62875]: DEBUG nova.compute.utils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1974.707666] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1974.707833] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1974.775220] env[62875]: DEBUG nova.policy [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3deb078a962a44a9860a879616f481ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8caa1f0194694e959245cc900954a2f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1974.958015] env[62875]: INFO nova.scheduler.client.report [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Deleted allocations for instance c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f [ 1975.058083] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe63606-c0c4-4211-afe4-f2ef37efb623 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.065333] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4baccd1-ec52-4387-b972-928c6128d599 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1975.100414] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f6b2f4-8dd0-43fc-b222-305524c12fbc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.107827] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b44e2a4-72ca-477a-a148-c6b414723323 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.122557] env[62875]: DEBUG nova.compute.provider_tree [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1975.211124] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1975.437663] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Successfully created port: 30c8dea9-637d-4ca6-9425-ea1a678bd89f {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1975.467589] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3ef47b5a-801f-4d70-8100-5286502c8acd tempest-ImagesOneServerNegativeTestJSON-63070236 tempest-ImagesOneServerNegativeTestJSON-63070236-project-member] Lock "c2923fdc-562c-4c5f-90b4-fcc76a2b7f7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.921s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.627437] env[62875]: DEBUG nova.scheduler.client.report [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1975.977866] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1976.136094] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.935s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.136094] env[62875]: ERROR nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information. [ 1976.136094] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Traceback (most recent call last): [ 1976.136094] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1976.136094] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] self.driver.spawn(context, instance, image_meta, [ 1976.136094] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1976.136094] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1976.136094] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1976.136094] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] vm_ref = self.build_virtual_machine(instance, [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] vif_infos = vmwarevif.get_vif_info(self._session, [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] for vif in network_info: [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] return self._sync_wrapper(fn, *args, **kwargs) [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] self.wait() [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1976.136491] env[62875]: ERROR 
nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] self[:] = self._gt.wait() [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] return self._exit_event.wait() [ 1976.136491] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] current.throw(*self._exc) [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] result = function(*args, **kwargs) [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] return func(*args, **kwargs) [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] raise e [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] nwinfo = self.network_api.allocate_for_instance( [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] created_port_ids = self._update_ports_for_instance( [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1976.136852] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] with excutils.save_and_reraise_exception(): [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] self.force_reraise() [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] raise self.value [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] updated_port = self._update_port( [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] _ensure_no_port_binding_failure(port) [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] raise exception.PortBindingFailed(port_id=port['id']) [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] nova.exception.PortBindingFailed: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information. [ 1976.137190] env[62875]: ERROR nova.compute.manager [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] [ 1976.137486] env[62875]: DEBUG nova.compute.utils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1976.141794] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Build of instance 6eb92420-57b1-4a7d-973f-10cd47be0416 was re-scheduled: Binding failed for port 7e57f7b8-c6c5-4e8c-b8f7-c6b3e8c4c8c2, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1976.141794] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1976.141794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquiring lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.141794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Acquired lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.142110] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1976.142110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.726s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1976.222451] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1976.260401] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1976.260729] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1976.260956] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1976.261289] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1976.261520] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1976.261622] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1976.261889] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1976.262140] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1976.262386] env[62875]: DEBUG 
nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1976.262614] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1976.262907] env[62875]: DEBUG nova.virt.hardware [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1976.264246] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d22f610-4aff-4b92-97ef-dd36a7f171c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.274222] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b32cd6-c0ef-46c4-895f-5341824401eb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.333592] env[62875]: DEBUG nova.compute.manager [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Received event network-changed-30c8dea9-637d-4ca6-9425-ea1a678bd89f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1976.333817] env[62875]: DEBUG nova.compute.manager [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Refreshing instance network info cache due to event network-changed-30c8dea9-637d-4ca6-9425-ea1a678bd89f. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1976.333997] env[62875]: DEBUG oslo_concurrency.lockutils [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] Acquiring lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.334154] env[62875]: DEBUG oslo_concurrency.lockutils [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] Acquired lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.334322] env[62875]: DEBUG nova.network.neutron [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Refreshing network info cache for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1976.416790] env[62875]: ERROR nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. [ 1976.416790] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1976.416790] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1976.416790] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1976.416790] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1976.416790] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1976.416790] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1976.416790] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1976.416790] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1976.416790] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1976.416790] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1976.416790] env[62875]: ERROR nova.compute.manager raise self.value [ 1976.416790] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1976.416790] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1976.416790] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1976.416790] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1976.417210] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1976.417210] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1976.417210] env[62875]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. [ 1976.417210] env[62875]: ERROR nova.compute.manager [ 1976.417210] env[62875]: Traceback (most recent call last): [ 1976.417210] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1976.417210] env[62875]: listener.cb(fileno) [ 1976.417210] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1976.417210] env[62875]: result = function(*args, **kwargs) [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1976.417210] env[62875]: return func(*args, **kwargs) [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1976.417210] env[62875]: raise e [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1976.417210] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1976.417210] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1976.417210] env[62875]: with excutils.save_and_reraise_exception(): [ 1976.417210] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1976.417210] env[62875]: self.force_reraise() [ 1976.417210] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1976.417210] env[62875]: raise self.value [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1976.417210] env[62875]: updated_port = self._update_port( [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1976.417210] env[62875]: _ensure_no_port_binding_failure(port) [ 1976.417210] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1976.417210] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1976.417903] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. [ 1976.417903] env[62875]: Removing descriptor: 21 [ 1976.417903] env[62875]: ERROR nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. 
[ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Traceback (most recent call last): [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] yield resources [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self.driver.spawn(context, instance, image_meta, [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1976.417903] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] vm_ref = self.build_virtual_machine(instance, [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] for vif in network_info: [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return self._sync_wrapper(fn, *args, **kwargs) [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self.wait() [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self[:] = self._gt.wait() [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return self._exit_event.wait() [ 1976.418265] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] result = hub.switch() [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return self.greenlet.switch() [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] result = function(*args, **kwargs) [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return func(*args, **kwargs) [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] raise e [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] nwinfo = self.network_api.allocate_for_instance( [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1976.418672] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] created_port_ids = self._update_ports_for_instance( [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] with excutils.save_and_reraise_exception(): [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self.force_reraise() [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] raise self.value [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] updated_port = self._update_port( [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] _ensure_no_port_binding_failure(port) [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1976.419026] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] raise exception.PortBindingFailed(port_id=port['id']) [ 1976.419387] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] nova.exception.PortBindingFailed: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. [ 1976.419387] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] [ 1976.419387] env[62875]: INFO nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Terminating instance [ 1976.501870] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.667760] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1976.749995] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.853267] env[62875]: DEBUG nova.network.neutron [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1976.922602] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Acquiring lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.947534] env[62875]: DEBUG nova.network.neutron [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.032896] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31687337-a0e2-4d91-96e4-fe24e3d0d3ac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.044206] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73569284-d528-4dab-9bf2-995f4c434d9f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.086546] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6296d063-e00c-49af-ba55-c103179c30f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.094265] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd6334b-1ff6-412f-ac1c-455facad1a6c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.107503] env[62875]: DEBUG nova.compute.provider_tree [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1977.253160] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Releasing lock "refresh_cache-6eb92420-57b1-4a7d-973f-10cd47be0416" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.253448] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1977.253637] env[62875]: DEBUG nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1977.253806] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1977.279009] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1977.452641] env[62875]: DEBUG oslo_concurrency.lockutils [req-1f23f1b1-4772-4b2e-8c7a-9a21f643eb28 req-fc2fa2bd-58df-4f2b-8f57-50030c709d30 service nova] Releasing lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.452641] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Acquired lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.452641] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1977.615298] env[62875]: DEBUG nova.scheduler.client.report [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1977.782539] env[62875]: DEBUG nova.network.neutron [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.977161] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 
tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1978.115263] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.117366] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.976s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.117968] env[62875]: ERROR nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information. [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Traceback (most recent call last): [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] self.driver.spawn(context, instance, image_meta, [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] vm_ref = self.build_virtual_machine(instance, [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] vif_infos = vmwarevif.get_vif_info(self._session, [ 1978.117968] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] for vif in network_info: [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: 
a19b7959-df3c-47e8-b920-edfe82c36489] return self._sync_wrapper(fn, *args, **kwargs) [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] self.wait() [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] self[:] = self._gt.wait() [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] return self._exit_event.wait() [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] current.throw(*self._exc) [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1978.118424] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] result = function(*args, **kwargs) [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] return func(*args, **kwargs) [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] raise e [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] nwinfo = self.network_api.allocate_for_instance( [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] created_port_ids = self._update_ports_for_instance( [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] with excutils.save_and_reraise_exception(): [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1978.118916] env[62875]: ERROR 
nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] self.force_reraise() [ 1978.118916] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] raise self.value [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] updated_port = self._update_port( [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] _ensure_no_port_binding_failure(port) [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] raise exception.PortBindingFailed(port_id=port['id']) [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] nova.exception.PortBindingFailed: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information. [ 1978.121034] env[62875]: ERROR nova.compute.manager [instance: a19b7959-df3c-47e8-b920-edfe82c36489] [ 1978.121034] env[62875]: DEBUG nova.compute.utils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1978.121439] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.234s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.123463] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Build of instance a19b7959-df3c-47e8-b920-edfe82c36489 was re-scheduled: Binding failed for port 05ad56d5-0f49-49e7-ae3f-dc1280a7199d, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1978.123910] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1978.124155] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Acquiring lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.124746] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Acquired lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.124746] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1978.286673] env[62875]: INFO nova.compute.manager [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] [instance: 6eb92420-57b1-4a7d-973f-10cd47be0416] Took 1.03 seconds to deallocate network for instance. [ 1978.531965] env[62875]: DEBUG nova.compute.manager [req-8fb242d6-01b4-4e96-bb90-87f277abd741 req-5c61cc69-7322-4b62-b9da-8bfd66c00bd9 service nova] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Received event network-vif-deleted-30c8dea9-637d-4ca6-9425-ea1a678bd89f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1978.621018] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Releasing lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.621018] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1978.621018] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1978.621018] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3d7ccef-ee57-4ee7-8834-367cdc4cb91a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.636824] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c72e7c-90eb-435e-a0b3-f6f9770b01c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.666893] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 100c3541-3af3-4d3c-8060-2235f18f51e4 could not be found. [ 1978.667148] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1978.667338] env[62875]: INFO nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1978.667619] env[62875]: DEBUG oslo.service.loopingcall [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1978.667875] env[62875]: DEBUG nova.compute.manager [-] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1978.667988] env[62875]: DEBUG nova.network.neutron [-] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1978.670286] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1978.692199] env[62875]: DEBUG nova.network.neutron [-] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1978.788185] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.978942] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "2cf54268-5499-49c9-8029-68b3866581d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.978942] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "2cf54268-5499-49c9-8029-68b3866581d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.030843] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b885a2cb-ce09-4274-a560-2c031146723a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.038765] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2376b9-feb2-4c62-98a3-0e1848f345d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.068084] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6525c73-5b73-4dca-96e1-3c295c2564f4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.075782] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cdfad3-d159-45a6-922b-b88f9c50b05a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.089273] env[62875]: DEBUG nova.compute.provider_tree [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.195376] env[62875]: DEBUG nova.network.neutron [-] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.300279] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Releasing lock "refresh_cache-a19b7959-df3c-47e8-b920-edfe82c36489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1979.300682] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1979.300984] env[62875]: DEBUG nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1979.301173] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1979.322910] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1979.330986] env[62875]: INFO nova.scheduler.client.report [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Deleted allocations for instance 6eb92420-57b1-4a7d-973f-10cd47be0416 [ 1979.596954] env[62875]: DEBUG nova.scheduler.client.report [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1979.697682] env[62875]: INFO nova.compute.manager [-] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Took 1.03 seconds to deallocate network for instance. 
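
Every PortBindingFailed traceback in this excerpt bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the frames above): after updating the port in Neutron, Nova inspects the returned port and raises as soon as the binding has come back failed, which is what drives the abort_instance_claim and re-schedule activity around each error. A minimal, self-contained sketch of that check, assuming Neutron signals a failed binding by setting the port's 'binding:vif_type' to 'binding_failed':

    # Minimal sketch of the check the tracebacks above end in; not a copy of
    # Nova source. Assumption: Neutron reports a failed binding by setting
    # the port's 'binding:vif_type' attribute to 'binding_failed'.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # 'port' is the port dict returned by the Neutron update call.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port['id'])

    # The binding:vif_type value is hypothetical; the ID is from the log.
    try:
        ensure_no_port_binding_failure(
            {'id': '05ad56d5-0f49-49e7-ae3f-dc1280a7199d',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same text as the "Binding failed for port" lines above
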
[ 1979.701337] env[62875]: DEBUG nova.compute.claims [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1979.701541] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.827588] env[62875]: DEBUG nova.network.neutron [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.840065] env[62875]: DEBUG oslo_concurrency.lockutils [None req-52a10725-8c4b-4612-9f71-bd0437f54ee5 tempest-ServersAdminTestJSON-841127122 tempest-ServersAdminTestJSON-841127122-project-member] Lock "6eb92420-57b1-4a7d-973f-10cd47be0416" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 117.402s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.101090] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.981s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.101800] env[62875]: ERROR nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. 
[ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] Traceback (most recent call last): [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self.driver.spawn(context, instance, image_meta, [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] vm_ref = self.build_virtual_machine(instance, [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] vif_infos = vmwarevif.get_vif_info(self._session, [ 1980.101800] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] for vif in network_info: [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] return self._sync_wrapper(fn, *args, **kwargs) [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self.wait() [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self[:] = self._gt.wait() [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] return self._exit_event.wait() [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] current.throw(*self._exc) [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1980.102123] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] result = function(*args, **kwargs) [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] return func(*args, **kwargs) [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] raise e [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] nwinfo = self.network_api.allocate_for_instance( [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] created_port_ids = self._update_ports_for_instance( [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] with excutils.save_and_reraise_exception(): [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] self.force_reraise() [ 1980.102448] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] raise self.value [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] updated_port = self._update_port( [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] _ensure_no_port_binding_failure(port) [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] raise exception.PortBindingFailed(port_id=port['id']) [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] 
nova.exception.PortBindingFailed: Binding failed for port 06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. [ 1980.102759] env[62875]: ERROR nova.compute.manager [instance: e11311ed-6804-4df4-a775-9060463ac927] [ 1980.102759] env[62875]: DEBUG nova.compute.utils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Binding failed for port 06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1980.104790] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.840s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.109583] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Build of instance e11311ed-6804-4df4-a775-9060463ac927 was re-scheduled: Binding failed for port 06b387d5-481c-4736-9fe2-693ab649f0f5, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1980.109583] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1980.110419] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Acquiring lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.110419] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Acquired lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.110419] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1980.332145] env[62875]: INFO nova.compute.manager [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] [instance: a19b7959-df3c-47e8-b920-edfe82c36489] Took 1.03 seconds to deallocate network for instance. 
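
The oslo_utils/excutils.py frames (save_and_reraise_exception, force_reraise, raise self.value) recur in every traceback because _update_ports_for_instance runs its cleanup inside that context manager and then lets the original exception propagate unchanged. A hypothetical, self-contained sketch of the pattern; update_port() and rollback() are stand-ins, not Nova code:

    from oslo_utils import excutils

    def update_port(port_id):
        # Stand-in for the Neutron call that fails in the tracebacks above.
        raise RuntimeError('binding failed for port %s' % port_id)

    def rollback(created):
        print('cleaning up ports: %s' % created)

    def update_ports(port_ids):
        created = []
        try:
            for port_id in port_ids:
                update_port(port_id)
                created.append(port_id)
        except Exception:
            # Cleanup runs first; the saved exception is then re-raised,
            # which is where the force_reraise() / raise self.value frames
            # in the tracebacks above come from.
            with excutils.save_and_reraise_exception():
                rollback(created)

    try:
        update_ports(['06b387d5'])
    except RuntimeError as exc:
        print('re-raised: %s' % exc)
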
[ 1980.346881] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1980.649914] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1980.832782] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.871365] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.996214] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecfdf19-0c1c-43dc-b305-ce4a016134b9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.004437] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9557078e-bbb9-40d1-86e4-032bc1a7dcb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.044710] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3b3b0a-62fc-4f15-8f76-a999bb9ffbf4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.052562] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513baeba-22ca-4812-aa29-084ad893d162 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.067943] env[62875]: DEBUG nova.compute.provider_tree [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.336068] env[62875]: DEBUG oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Releasing lock "refresh_cache-e11311ed-6804-4df4-a775-9060463ac927" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.336345] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Virt driver 
does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1981.336533] env[62875]: DEBUG nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1981.336746] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1981.359166] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1981.371827] env[62875]: INFO nova.scheduler.client.report [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Deleted allocations for instance a19b7959-df3c-47e8-b920-edfe82c36489 [ 1981.572584] env[62875]: DEBUG nova.scheduler.client.report [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1981.863683] env[62875]: DEBUG nova.network.neutron [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.883950] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd79b49b-7895-469d-8d8d-5d9bcde206be tempest-ServersTestManualDisk-1878931573 tempest-ServersTestManualDisk-1878931573-project-member] Lock "a19b7959-df3c-47e8-b920-edfe82c36489" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 115.801s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.080323] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.976s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.080651] env[62875]: ERROR nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Traceback (most recent call last): [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self.driver.spawn(context, instance, image_meta, [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] vm_ref = self.build_virtual_machine(instance, [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] vif_infos = vmwarevif.get_vif_info(self._session, [ 1982.080651] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] for vif in network_info: [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] return self._sync_wrapper(fn, *args, **kwargs) [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self.wait() [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self[:] = self._gt.wait() [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] return self._exit_event.wait() [ 
1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] current.throw(*self._exc) [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1982.081762] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] result = function(*args, **kwargs) [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] return func(*args, **kwargs) [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] raise e [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] nwinfo = self.network_api.allocate_for_instance( [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] created_port_ids = self._update_ports_for_instance( [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] with excutils.save_and_reraise_exception(): [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] self.force_reraise() [ 1982.082430] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] raise self.value [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] updated_port = self._update_port( [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: 
d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] _ensure_no_port_binding_failure(port) [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] raise exception.PortBindingFailed(port_id=port['id']) [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] nova.exception.PortBindingFailed: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. [ 1982.082891] env[62875]: ERROR nova.compute.manager [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] [ 1982.082891] env[62875]: DEBUG nova.compute.utils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1982.083318] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.871s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.086588] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Build of instance d3270b4a-2b81-41f5-a2af-5b7f441e4a2d was re-scheduled: Binding failed for port 62c504ab-9c7d-4989-8611-c9cad1eb9826, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1982.087060] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1982.087288] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Acquiring lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.087441] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Acquired lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.087585] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1982.366237] env[62875]: INFO nova.compute.manager [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] [instance: e11311ed-6804-4df4-a775-9060463ac927] Took 1.03 seconds to deallocate network for instance. [ 1982.386202] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1982.628473] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1982.813643] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.912966] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.950432] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4ef0e9-b161-4482-90a4-1c1bb25c70db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.960015] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8af4d4-4c4d-44ea-9948-37e4d60e20fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.994315] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1cb9b4-7acf-41f9-8b66-0660867add14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.002661] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227bd19e-010f-472c-9700-fa931320f823 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.020964] env[62875]: DEBUG nova.compute.provider_tree [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1983.317806] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Releasing lock "refresh_cache-d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.318406] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1983.318406] env[62875]: DEBUG nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1983.319706] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1983.348564] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1983.408383] env[62875]: INFO nova.scheduler.client.report [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Deleted allocations for instance e11311ed-6804-4df4-a775-9060463ac927 [ 1983.528972] env[62875]: DEBUG nova.scheduler.client.report [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1983.736813] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "9e0aaea6-96cf-494d-9f70-a709a47f9772" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.736813] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.851191] env[62875]: DEBUG nova.network.neutron [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.920611] env[62875]: DEBUG 
oslo_concurrency.lockutils [None req-57435e8b-2531-4fe0-a37c-f2e271577652 tempest-ServerActionsTestJSON-1076213528 tempest-ServerActionsTestJSON-1076213528-project-member] Lock "e11311ed-6804-4df4-a775-9060463ac927" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 117.433s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.032553] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.950s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.034369] env[62875]: ERROR nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Traceback (most recent call last): [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self.driver.spawn(context, instance, image_meta, [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] vm_ref = self.build_virtual_machine(instance, [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] vif_infos = vmwarevif.get_vif_info(self._session, [ 1984.034369] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] for vif in network_info: [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] return self._sync_wrapper(fn, *args, **kwargs) [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self.wait() [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self[:] = self._gt.wait() [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] return self._exit_event.wait() [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] current.throw(*self._exc) [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1984.034741] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] result = function(*args, **kwargs) [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] return func(*args, **kwargs) [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] raise e [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] nwinfo = self.network_api.allocate_for_instance( [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] created_port_ids = self._update_ports_for_instance( [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] with excutils.save_and_reraise_exception(): [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] self.force_reraise() [ 1984.035097] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] raise self.value [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] updated_port = self._update_port( [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] _ensure_no_port_binding_failure(port) [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] raise exception.PortBindingFailed(port_id=port['id']) [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] nova.exception.PortBindingFailed: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. [ 1984.035473] env[62875]: ERROR nova.compute.manager [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] [ 1984.036794] env[62875]: DEBUG nova.compute.utils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1984.038609] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.836s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1984.040583] env[62875]: INFO nova.compute.claims [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1984.043339] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Build of instance 782e6663-202b-4ed0-8a1a-cc54f246143b was re-scheduled: Binding failed for port 5fec3f59-70f3-4fa8-a84f-fedbfefdf6ea, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1984.043860] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1984.044146] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.044627] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.044858] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1984.358966] env[62875]: INFO nova.compute.manager [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] [instance: d3270b4a-2b81-41f5-a2af-5b7f441e4a2d] Took 1.04 seconds to deallocate network for instance. [ 1984.426882] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1984.589020] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1984.762635] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.955706] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.267125] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "refresh_cache-782e6663-202b-4ed0-8a1a-cc54f246143b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.267376] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1985.267561] env[62875]: DEBUG nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1985.267728] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1985.289098] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1985.397163] env[62875]: INFO nova.scheduler.client.report [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Deleted allocations for instance d3270b4a-2b81-41f5-a2af-5b7f441e4a2d [ 1985.426186] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954de357-18af-46f8-93d6-5d4c20079677 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.435434] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbec15ec-aad0-45f7-b696-d44bf5e7a9f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.466113] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc01993-f2b6-4d63-91b1-ff168f8f54f7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.473273] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0426e239-a97f-45c3-8e71-171665215528 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.491451] env[62875]: DEBUG nova.compute.provider_tree [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1985.792356] env[62875]: DEBUG nova.network.neutron [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.906443] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e90f53e-88b9-460b-b3b7-b296c368c343 tempest-ServersTestJSON-850029745 tempest-ServersTestJSON-850029745-project-member] Lock "d3270b4a-2b81-41f5-a2af-5b7f441e4a2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 118.848s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.998288] env[62875]: DEBUG nova.scheduler.client.report [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1986.295436] env[62875]: INFO nova.compute.manager [None req-8567d6ea-ee94-4c86-b7da-7299f076424a 
tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 782e6663-202b-4ed0-8a1a-cc54f246143b] Took 1.03 seconds to deallocate network for instance. [ 1986.415725] env[62875]: DEBUG nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1986.502420] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.464s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.502941] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1986.505577] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.974s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.507106] env[62875]: INFO nova.compute.claims [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1986.950482] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.011377] env[62875]: DEBUG nova.compute.utils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1987.012947] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1987.013153] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1987.250387] env[62875]: DEBUG nova.policy [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71edb09414414207931ab93b6db576d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6cef4dddbee94e1bbcaed28392a8a19b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1987.328740] env[62875]: INFO nova.scheduler.client.report [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleted allocations for instance 782e6663-202b-4ed0-8a1a-cc54f246143b [ 1987.519440] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1987.840103] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8567d6ea-ee94-4c86-b7da-7299f076424a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "782e6663-202b-4ed0-8a1a-cc54f246143b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 119.803s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.873697] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d547db-3507-4dc5-b49c-3f6a32b9e7ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.882391] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9b9cdb-5653-49f8-8f29-ac10bd007285 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.918944] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2ea3a1-1f10-43c8-aad4-deaef68d3086 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.926325] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3ad89c-26ac-4e71-ae4f-cc14d3e486e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.941403] env[62875]: DEBUG nova.compute.provider_tree [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c 
tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1988.029920] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Successfully created port: 484f1d78-c983-4016-9554-d37475fe8aa6 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1988.345409] env[62875]: DEBUG nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1988.445966] env[62875]: DEBUG nova.scheduler.client.report [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1988.535949] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1988.568052] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1988.568052] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1988.568052] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1988.568266] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1988.568266] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1988.568550] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1988.570037] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1988.570037] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1988.570037] env[62875]: DEBUG 
nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1988.570037] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1988.570037] env[62875]: DEBUG nova.virt.hardware [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1988.570583] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27cb7c8-a8f4-4bc1-85ab-1699c57eec7f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.580089] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bf98b4-aedf-4885-8534-efc59cf94b11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.874370] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.952843] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.954062] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1988.956656] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.241s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.956926] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.957039] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1988.957329] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.503s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.958936] env[62875]: INFO nova.compute.claims [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1988.963598] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535e0296-9ca1-4924-9ac4-05f483f708b3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.974340] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ba7074-3bd5-4dc4-81bb-afb248ef2279 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.990760] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3c9172-4e98-4556-a81d-9bf7763c39b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.003703] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ec31ff-958c-4252-9bf6-d49ef04066bd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.037902] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181286MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1989.038175] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.395408] env[62875]: DEBUG nova.compute.manager [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Received event network-changed-484f1d78-c983-4016-9554-d37475fe8aa6 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1989.395408] env[62875]: DEBUG nova.compute.manager [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Refreshing instance network info cache due to event network-changed-484f1d78-c983-4016-9554-d37475fe8aa6. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1989.395408] env[62875]: DEBUG oslo_concurrency.lockutils [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] Acquiring lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.395408] env[62875]: DEBUG oslo_concurrency.lockutils [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] Acquired lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.396243] env[62875]: DEBUG nova.network.neutron [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Refreshing network info cache for port 484f1d78-c983-4016-9554-d37475fe8aa6 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1989.466151] env[62875]: DEBUG nova.compute.utils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1989.466151] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1989.466325] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1989.547050] env[62875]: DEBUG nova.policy [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7734c32417d340afa034e98dd615bcbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dc4df9df7ec48bf9fbf765d48a4308b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1989.608630] env[62875]: ERROR nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. [ 1989.608630] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1989.608630] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1989.608630] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1989.608630] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1989.608630] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1989.608630] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1989.608630] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1989.608630] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1989.608630] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1989.608630] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1989.608630] env[62875]: ERROR nova.compute.manager raise self.value [ 1989.608630] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1989.608630] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1989.608630] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1989.608630] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1989.609212] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1989.609212] env[62875]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 1989.609212] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. [ 1989.609212] env[62875]: ERROR nova.compute.manager [ 1989.609212] env[62875]: Traceback (most recent call last): [ 1989.613015] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1989.613015] env[62875]: listener.cb(fileno) [ 1989.613015] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1989.613015] env[62875]: result = function(*args, **kwargs) [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1989.613015] env[62875]: return func(*args, **kwargs) [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1989.613015] env[62875]: raise e [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1989.613015] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1989.613015] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1989.613015] env[62875]: with excutils.save_and_reraise_exception(): [ 1989.613015] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1989.613015] env[62875]: self.force_reraise() [ 1989.613015] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1989.613015] env[62875]: raise self.value [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1989.613015] env[62875]: updated_port = self._update_port( [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1989.613015] env[62875]: _ensure_no_port_binding_failure(port) [ 1989.613015] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1989.613015] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1989.613015] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. [ 1989.613015] env[62875]: Removing descriptor: 21 [ 1989.613820] env[62875]: ERROR nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. 
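Both PortBindingFailed tracebacks above bottom out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which converts a Neutron port whose binding failed into the exception that is then re-logged against instance 77d57f64-9bab-46f1-87b4-62bac5c5d2bd in the per-instance traceback below. A minimal, self-contained sketch of that check, assuming only the standard Neutron port attribute binding:vif_type; the constant and exception class here are simplified stand-ins for the Nova originals:

    # Sketch of the raising check seen in the tracebacks; stand-ins for
    # nova.exception.PortBindingFailed and the nova.network.model constant.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind with
        # binding:vif_type='binding_failed'; Nova raises so the build is
        # aborted and the instance can be cleaned up or re-scheduled.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure({
            'id': '484f1d78-c983-4016-9554-d37475fe8aa6',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)  # same message as the ERROR records in this log
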
[ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Traceback (most recent call last): [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] yield resources [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self.driver.spawn(context, instance, image_meta, [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] vm_ref = self.build_virtual_machine(instance, [ 1989.613820] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] for vif in network_info: [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return self._sync_wrapper(fn, *args, **kwargs) [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self.wait() [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self[:] = self._gt.wait() [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return self._exit_event.wait() [ 1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1989.614165] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] result = hub.switch() [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return self.greenlet.switch() [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] result = function(*args, **kwargs) [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return func(*args, **kwargs) [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] raise e [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] nwinfo = self.network_api.allocate_for_instance( [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] created_port_ids = self._update_ports_for_instance( [ 1989.614518] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] with excutils.save_and_reraise_exception(): [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self.force_reraise() [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] raise self.value [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] updated_port = self._update_port( [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] _ensure_no_port_binding_failure(port) [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] raise exception.PortBindingFailed(port_id=port['id']) [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] nova.exception.PortBindingFailed: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. [ 1989.614869] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] [ 1989.615250] env[62875]: INFO nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Terminating instance [ 1989.915758] env[62875]: DEBUG nova.network.neutron [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1989.969610] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1990.050814] env[62875]: DEBUG nova.network.neutron [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.121400] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Acquiring lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.294968] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Successfully created port: 4d91b54d-40d6-45e3-b2cc-7db118f143cc {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1990.314817] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0ff518-2516-4cf5-a6e1-63d270b0c9c0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.332191] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb42e1a7-791b-47c1-b1a2-1c63aae7b388 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.371432] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc173f0-8546-4624-83a0-77ea5650668e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.385567] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b567e089-65fd-43c8-abd3-71b9cadfaaed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.411338] env[62875]: DEBUG nova.compute.provider_tree [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1990.556517] env[62875]: DEBUG oslo_concurrency.lockutils [req-78d7fa40-d937-4e7e-a50e-8909886a7880 req-d2e0f5a5-e6ac-4899-a376-175c25546603 service nova] Releasing lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.556840] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Acquired lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.556840] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 
tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1990.915137] env[62875]: DEBUG nova.scheduler.client.report [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1990.982220] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1991.017060] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1991.017323] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1991.017480] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1991.018197] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1991.018381] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c 
tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1991.018572] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1991.018749] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1991.018922] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1991.019112] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1991.019286] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1991.019456] env[62875]: DEBUG nova.virt.hardware [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1991.020325] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299c64d1-330e-43f0-a1ec-19c14be7ef3f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.029716] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599525f5-d6fc-4341-bc7d-d72bd3c6cbb3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.064806] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.065102] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5
tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.086888] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1991.259615] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.359496] env[62875]: DEBUG nova.compute.manager [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Received event network-changed-4d91b54d-40d6-45e3-b2cc-7db118f143cc {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1991.359684] env[62875]: DEBUG nova.compute.manager [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Refreshing instance network info cache due to event network-changed-4d91b54d-40d6-45e3-b2cc-7db118f143cc. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1991.359911] env[62875]: DEBUG oslo_concurrency.lockutils [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] Acquiring lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.360053] env[62875]: DEBUG oslo_concurrency.lockutils [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] Acquired lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.360198] env[62875]: DEBUG nova.network.neutron [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Refreshing network info cache for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1991.420707] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.421249] env[62875]: DEBUG nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1991.428380] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.927s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.431697] env[62875]: INFO nova.compute.claims [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1991.439127] env[62875]: DEBUG nova.compute.manager [req-85ae4a87-e3f7-4668-9488-89030e235f3a req-3e170f8b-06d8-44c0-931b-fd5c60653295 service nova] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Received event network-vif-deleted-484f1d78-c983-4016-9554-d37475fe8aa6 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1991.569560] env[62875]: ERROR nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. 
[ 1991.569560] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1991.569560] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1991.569560] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1991.569560] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1991.569560] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1991.569560] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1991.569560] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1991.569560] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1991.569560] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1991.569560] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1991.569560] env[62875]: ERROR nova.compute.manager raise self.value [ 1991.569560] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1991.569560] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1991.569560] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1991.569560] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1991.570140] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1991.570140] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1991.570140] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. 
[ 1991.570140] env[62875]: ERROR nova.compute.manager [ 1991.570140] env[62875]: Traceback (most recent call last): [ 1991.570140] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1991.570140] env[62875]: listener.cb(fileno) [ 1991.570140] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1991.570140] env[62875]: result = function(*args, **kwargs) [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1991.570140] env[62875]: return func(*args, **kwargs) [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1991.570140] env[62875]: raise e [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1991.570140] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1991.570140] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1991.570140] env[62875]: with excutils.save_and_reraise_exception(): [ 1991.570140] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1991.570140] env[62875]: self.force_reraise() [ 1991.570140] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1991.570140] env[62875]: raise self.value [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1991.570140] env[62875]: updated_port = self._update_port( [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1991.570140] env[62875]: _ensure_no_port_binding_failure(port) [ 1991.570140] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1991.570140] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1991.571026] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. [ 1991.571026] env[62875]: Removing descriptor: 18 [ 1991.571026] env[62875]: ERROR nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. 
[ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Traceback (most recent call last): [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] yield resources [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self.driver.spawn(context, instance, image_meta, [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1991.571026] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] vm_ref = self.build_virtual_machine(instance, [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] for vif in network_info: [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return self._sync_wrapper(fn, *args, **kwargs) [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self.wait() [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self[:] = self._gt.wait() [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return self._exit_event.wait() [ 1991.571411] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] result = hub.switch() [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return self.greenlet.switch() [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] result = function(*args, **kwargs) [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return func(*args, **kwargs) [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] raise e [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] nwinfo = self.network_api.allocate_for_instance( [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1991.571876] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] created_port_ids = self._update_ports_for_instance( [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] with excutils.save_and_reraise_exception(): [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self.force_reraise() [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] raise self.value [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] updated_port = self._update_port( [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] _ensure_no_port_binding_failure(port) [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1991.572352] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] raise exception.PortBindingFailed(port_id=port['id']) [ 1991.572802] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] nova.exception.PortBindingFailed: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. [ 1991.572802] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] [ 1991.572802] env[62875]: INFO nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Terminating instance [ 1991.763577] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Releasing lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.764044] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1991.764247] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1991.764555] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d454a330-8eb7-49ca-872b-580b2b805435 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.778999] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aeb2d71-4574-47ea-b1c6-5fa1b4daefea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.804118] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 77d57f64-9bab-46f1-87b4-62bac5c5d2bd could not be found. 
[ 1991.804373] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1991.804561] env[62875]: INFO nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1991.804812] env[62875]: DEBUG oslo.service.loopingcall [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1991.805387] env[62875]: DEBUG nova.compute.manager [-] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1991.805387] env[62875]: DEBUG nova.network.neutron [-] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1991.826202] env[62875]: DEBUG nova.network.neutron [-] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1991.890550] env[62875]: DEBUG nova.network.neutron [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1991.939859] env[62875]: DEBUG nova.compute.utils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1991.941598] env[62875]: DEBUG nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Not allocating networking since 'none' was specified. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1992.036522] env[62875]: DEBUG nova.network.neutron [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.075212] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.329993] env[62875]: DEBUG nova.network.neutron [-] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.446346] env[62875]: DEBUG nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1992.541776] env[62875]: DEBUG oslo_concurrency.lockutils [req-351f38cf-34c5-43ad-ab8a-c4d3757f09ec req-39bf5952-f5f0-4def-b7cc-efc898d92b54 service nova] Releasing lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.541891] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquired lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.542104] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1992.711226] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.712610] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.819017] env[62875]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73800f45-83ae-4a7d-9ff7-ed9054bf364a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.826163] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6842c2e5-ad36-44b5-b60f-b936d05eeea0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.831093] env[62875]: INFO nova.compute.manager [-] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Took 1.03 seconds to deallocate network for instance. [ 1992.864643] env[62875]: DEBUG nova.compute.claims [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1992.865032] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.865892] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164b1ecb-69e4-4008-8c31-bcc339545388 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.873750] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5063aa27-84b9-4cef-80dc-0d63410e9180 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.889061] env[62875]: DEBUG nova.compute.provider_tree [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1993.067415] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1993.170396] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.392238] env[62875]: DEBUG nova.scheduler.client.report [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1993.460749] env[62875]: DEBUG nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1993.484794] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1993.485063] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1993.485231] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1993.485414] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Flavor pref 0:0:0 {{(pid=62875) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1993.486972] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1993.486972] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1993.486972] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1993.486972] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1993.486972] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1993.487303] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1993.487303] env[62875]: DEBUG nova.virt.hardware [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1993.487461] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc92ed9-7568-4912-bfad-fd24d2de2c5c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.491738] env[62875]: DEBUG nova.compute.manager [req-5ba63f0c-c7ce-432d-aa20-cacd4f4c9e0b req-7e96d401-38d4-4159-8af7-06b750127ace service nova] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Received event network-vif-deleted-4d91b54d-40d6-45e3-b2cc-7db118f143cc {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1993.499020] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6bdc52-3add-4fd1-b342-7e434ac1188c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.512454] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 
tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1993.518200] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Creating folder: Project (cecc85bf52124533aedd389f2e186f7b). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1993.518498] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66a05e90-f045-498c-904a-7a366afe28d3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.530195] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Created folder: Project (cecc85bf52124533aedd389f2e186f7b) in parent group-v444854. [ 1993.530401] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Creating folder: Instances. Parent ref: group-v444862. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1993.531422] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a85a6c0-d6c6-4085-9630-8aaa5e51b460 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.539574] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Created folder: Instances in parent group-v444862. [ 1993.540617] env[62875]: DEBUG oslo.service.loopingcall [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.540617] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1993.540617] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dada4a2-737c-424f-b837-4e9d2234f059 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.558888] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1993.558888] env[62875]: value = "task-2179942" [ 1993.558888] env[62875]: _type = "Task" [ 1993.558888] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.565491] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179942, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.673280] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Releasing lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1993.674437] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1993.674831] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1993.674936] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc479b5f-4154-4aae-bb33-5304cd62ea55 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.691237] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae38cbd-00e1-4c94-b12b-d6d848e07caa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.719587] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f190f86-2faa-4b8e-821f-2113577541e4 could not be found. [ 1993.720020] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1993.720389] env[62875]: INFO nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1993.720773] env[62875]: DEBUG oslo.service.loopingcall [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.722028] env[62875]: DEBUG nova.compute.manager [-] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1993.722028] env[62875]: DEBUG nova.network.neutron [-] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1993.748022] env[62875]: DEBUG nova.network.neutron [-] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1993.899843] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.900411] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1993.903144] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.202s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.068015] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179942, 'name': CreateVM_Task, 'duration_secs': 0.280461} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.068227] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1994.068654] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.068806] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.069192] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1994.069452] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e67609e8-d244-4afe-81d7-c998772954cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.073885] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1994.073885] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529af0b0-780d-a7cb-fceb-53f44292ddd3" [ 1994.073885] env[62875]: _type = "Task" [ 1994.073885] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.081712] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529af0b0-780d-a7cb-fceb-53f44292ddd3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.251447] env[62875]: DEBUG nova.network.neutron [-] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.408261] env[62875]: DEBUG nova.compute.utils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1994.414925] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1994.415652] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1994.498680] env[62875]: DEBUG nova.policy [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7734c32417d340afa034e98dd615bcbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dc4df9df7ec48bf9fbf765d48a4308b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1994.588947] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529af0b0-780d-a7cb-fceb-53f44292ddd3, 'name': SearchDatastore_Task, 'duration_secs': 0.017444} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.593224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.593651] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1994.594040] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.594298] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.594595] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1994.595278] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3e9bdae-b09c-4cce-8039-301aa0afa9ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.605595] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1994.605950] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1994.610981] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a810b54c-623b-414e-b6c3-e0c71f7bedeb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.620118] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1994.620118] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52734a41-50a2-4698-96dd-566e030fd765" [ 1994.620118] env[62875]: _type = "Task" [ 1994.620118] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.631393] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52734a41-50a2-4698-96dd-566e030fd765, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.757689] env[62875]: INFO nova.compute.manager [-] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Took 1.04 seconds to deallocate network for instance. [ 1994.761126] env[62875]: DEBUG nova.compute.claims [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1994.763921] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.822922] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366495ed-5916-422d-af67-7439396e852e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.831165] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477446c9-38a2-43e8-9886-ba0e025e18f0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.862920] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ed5a42-29d2-4533-b59a-725d6e9d657b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.873707] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faff6fc3-79a3-4b8e-bfae-3867c3995d6c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.890075] env[62875]: DEBUG nova.compute.provider_tree [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 
tempest-ServerActionsTestOtherA-961067021-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1994.920982] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1994.974785] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Successfully created port: 2cdce133-68ef-43b8-9b99-27031c91d8e3 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1995.131748] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52734a41-50a2-4698-96dd-566e030fd765, 'name': SearchDatastore_Task, 'duration_secs': 0.010563} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.132052] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fa0a9fd-bef1-41fe-bf14-a804275bf9bc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.137575] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1995.137575] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527610a8-a291-32f3-36c4-644544898d9b" [ 1995.137575] env[62875]: _type = "Task" [ 1995.137575] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.146033] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527610a8-a291-32f3-36c4-644544898d9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.394334] env[62875]: DEBUG nova.scheduler.client.report [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1995.651405] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527610a8-a291-32f3-36c4-644544898d9b, 'name': SearchDatastore_Task, 'duration_secs': 0.043157} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.652023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.652614] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1995.653178] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-634b6384-c161-45dc-a7bb-d313c301c6b9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.662655] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1995.662655] env[62875]: value = "task-2179943" [ 1995.662655] env[62875]: _type = "Task" [ 1995.662655] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.674812] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179943, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.902023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.997s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.902023] env[62875]: ERROR nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. [ 1995.902023] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Traceback (most recent call last): [ 1995.902023] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1995.902023] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self.driver.spawn(context, instance, image_meta, [ 1995.902023] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1995.902023] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1995.902023] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1995.902023] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] vm_ref = self.build_virtual_machine(instance, [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] for vif in network_info: [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return self._sync_wrapper(fn, *args, **kwargs) [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self.wait() [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 
1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self[:] = self._gt.wait() [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return self._exit_event.wait() [ 1995.902500] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] result = hub.switch() [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return self.greenlet.switch() [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] result = function(*args, **kwargs) [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] return func(*args, **kwargs) [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] raise e [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] nwinfo = self.network_api.allocate_for_instance( [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1995.902972] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] created_port_ids = self._update_ports_for_instance( [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] with excutils.save_and_reraise_exception(): [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] self.force_reraise() [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] raise self.value [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] updated_port = self._update_port( [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] _ensure_no_port_binding_failure(port) [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1995.903426] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] raise exception.PortBindingFailed(port_id=port['id']) [ 1995.903881] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] nova.exception.PortBindingFailed: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. [ 1995.903881] env[62875]: ERROR nova.compute.manager [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] [ 1995.903881] env[62875]: DEBUG nova.compute.utils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1995.907462] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.034s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.907462] env[62875]: INFO nova.compute.claims [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1995.912769] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Build of instance 100c3541-3af3-4d3c-8060-2235f18f51e4 was re-scheduled: Binding failed for port 30c8dea9-637d-4ca6-9425-ea1a678bd89f, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1995.913491] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1995.915653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Acquiring lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.915653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Acquired lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.915653] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1995.933102] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1995.968250] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1995.968506] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1995.968660] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1995.969524] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1995.969524] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1995.969524] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1995.969524] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1995.969751] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1995.969810] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1995.969974] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1995.970199] env[62875]: DEBUG nova.virt.hardware [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1995.971599] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74360f40-375b-4668-a147-a26d4f91afab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.983441] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92694556-0d5a-4fd3-b382-d6681022ffd0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.172070] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179943, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469679} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.172344] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1996.172555] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1996.172803] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a4a2ec9-54c4-42d6-abd5-cfc2ba3a0fbf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.180131] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1996.180131] env[62875]: value = "task-2179944" [ 1996.180131] env[62875]: _type = "Task" [ 1996.180131] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.189061] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179944, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.217831] env[62875]: DEBUG nova.compute.manager [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Received event network-changed-2cdce133-68ef-43b8-9b99-27031c91d8e3 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1996.218045] env[62875]: DEBUG nova.compute.manager [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Refreshing instance network info cache due to event network-changed-2cdce133-68ef-43b8-9b99-27031c91d8e3. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1996.218285] env[62875]: DEBUG oslo_concurrency.lockutils [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] Acquiring lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1996.218480] env[62875]: DEBUG oslo_concurrency.lockutils [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] Acquired lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1996.218742] env[62875]: DEBUG nova.network.neutron [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Refreshing network info cache for port 2cdce133-68ef-43b8-9b99-27031c91d8e3 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1996.376271] env[62875]: ERROR nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. [ 1996.376271] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 1996.376271] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1996.376271] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1996.376271] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1996.376271] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1996.376271] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1996.376271] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1996.376271] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1996.376271] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 1996.376271] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1996.376271] env[62875]: ERROR nova.compute.manager raise self.value [ 1996.376271] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1996.376271] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 1996.376271] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1996.376271] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1996.376840] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1996.376840] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1996.376840] env[62875]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. [ 1996.376840] env[62875]: ERROR nova.compute.manager [ 1996.376840] env[62875]: Traceback (most recent call last): [ 1996.376840] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1996.376840] env[62875]: listener.cb(fileno) [ 1996.376840] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1996.376840] env[62875]: result = function(*args, **kwargs) [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1996.376840] env[62875]: return func(*args, **kwargs) [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1996.376840] env[62875]: raise e [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1996.376840] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1996.376840] env[62875]: created_port_ids = self._update_ports_for_instance( [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1996.376840] env[62875]: with excutils.save_and_reraise_exception(): [ 1996.376840] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1996.376840] env[62875]: self.force_reraise() [ 1996.376840] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1996.376840] env[62875]: raise self.value [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1996.376840] env[62875]: updated_port = self._update_port( [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1996.376840] env[62875]: _ensure_no_port_binding_failure(port) [ 1996.376840] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1996.376840] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 1996.377877] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. [ 1996.377877] env[62875]: Removing descriptor: 18 [ 1996.377877] env[62875]: ERROR nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. 
[ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Traceback (most recent call last): [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] yield resources [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self.driver.spawn(context, instance, image_meta, [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1996.377877] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] vm_ref = self.build_virtual_machine(instance, [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] vif_infos = vmwarevif.get_vif_info(self._session, [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] for vif in network_info: [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return self._sync_wrapper(fn, *args, **kwargs) [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self.wait() [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self[:] = self._gt.wait() [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return self._exit_event.wait() [ 1996.378290] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] result = hub.switch() [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return self.greenlet.switch() [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] result = function(*args, **kwargs) [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return func(*args, **kwargs) [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] raise e [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] nwinfo = self.network_api.allocate_for_instance( [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1996.378718] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] created_port_ids = self._update_ports_for_instance( [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] with excutils.save_and_reraise_exception(): [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self.force_reraise() [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] raise self.value [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] updated_port = self._update_port( [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] _ensure_no_port_binding_failure(port) [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1996.379219] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] raise exception.PortBindingFailed(port_id=port['id']) [ 1996.379619] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] nova.exception.PortBindingFailed: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. [ 1996.379619] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] [ 1996.379619] env[62875]: INFO nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Terminating instance [ 1996.438062] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1996.535995] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.691160] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065183} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.691436] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1996.692224] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eab7157-1084-4282-939e-0debfd9afb65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.711025] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1996.711266] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a579e5b6-bd64-4425-a5e1-89136dd4419a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.732726] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1996.732726] env[62875]: value = "task-2179945" [ 1996.732726] env[62875]: _type = "Task" [ 1996.732726] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.740873] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179945, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.741672] env[62875]: DEBUG nova.network.neutron [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1996.833612] env[62875]: DEBUG nova.network.neutron [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.881637] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1997.038942] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Releasing lock "refresh_cache-100c3541-3af3-4d3c-8060-2235f18f51e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.039384] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1997.039588] env[62875]: DEBUG nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1997.040247] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1997.057789] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1997.206429] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ae9214-63fb-4bdc-a2e7-629adfa2c124 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.213423] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e175f12-6cd8-4005-8e5e-a94e5b0a7676 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.246513] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61d944d-c0e9-4843-9dc9-a1ce2d0d6de9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.256787] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0048565-26b3-4ec7-902a-bb68fa1c2c56 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.260457] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179945, 'name': ReconfigVM_Task, 'duration_secs': 0.254855} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.260713] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1997.261590] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e2be3d1-9abf-445a-9403-94af713dad3f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.270636] env[62875]: DEBUG nova.compute.provider_tree [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1997.273023] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1997.273023] env[62875]: value = "task-2179946" [ 1997.273023] env[62875]: _type = "Task" [ 1997.273023] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.280105] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179946, 'name': Rename_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.336767] env[62875]: DEBUG oslo_concurrency.lockutils [req-901493d0-7aab-451d-8075-d589f610dc43 req-6f964191-cbbc-4e06-becb-6f88bbc30e04 service nova] Releasing lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.336767] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquired lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1997.336942] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1997.560937] env[62875]: DEBUG nova.network.neutron [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1997.774581] env[62875]: DEBUG nova.scheduler.client.report [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1997.786647] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179946, 'name': Rename_Task, 'duration_secs': 0.134388} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.786900] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1997.788407] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df7fcee5-4e8c-4662-9383-a46e579b31e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.793502] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 1997.793502] env[62875]: value = "task-2179947" [ 1997.793502] env[62875]: _type = "Task" [ 1997.793502] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.801789] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179947, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.854967] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1997.939998] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.063693] env[62875]: INFO nova.compute.manager [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] [instance: 100c3541-3af3-4d3c-8060-2235f18f51e4] Took 1.02 seconds to deallocate network for instance. 
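The Rename_Task and PowerOnVM_Task records above show the driver's poll-until-done pattern: oslo.vmware submits a vCenter task, then wait_for_task polls it, logging intermediate progress ("progress is 6%.", "progress is 0%.") until the task reports a terminal state. A stdlib-only sketch of that loop, assuming a hypothetical fetch_task_info() callable in place of the PropertyCollector round-trips seen in the log:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=60.0):
        """Poll a task until it reaches a terminal state, like _poll_task above.

        fetch_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 6}; in the real driver this state
        comes back from PropertyCollector.RetrievePropertiesEx calls.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                return info                       # e.g. carries duration_secs
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'task failed'))
            # queued/running: log progress and poll again, as the log does
            print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(interval)
        raise TimeoutError(f"task {task_id} did not complete in {timeout}s")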
[ 1998.241753] env[62875]: DEBUG nova.compute.manager [req-2b268f70-423b-4fd5-9a46-80b0c07af056 req-447b77f9-23f8-42e8-b7aa-c4e85e705c3e service nova] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Received event network-vif-deleted-2cdce133-68ef-43b8-9b99-27031c91d8e3 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1998.282464] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.282928] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1998.285314] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.372s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.286797] env[62875]: INFO nova.compute.claims [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1998.305030] env[62875]: DEBUG oslo_vmware.api [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179947, 'name': PowerOnVM_Task, 'duration_secs': 0.401731} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.305030] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1998.305030] env[62875]: INFO nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Took 4.84 seconds to spawn the instance on the hypervisor. 
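The instance_claim records above (the "compute_resources" lock held ~2.4s, then "Claim successful on node ...") reflect the resource tracker testing a boot request against host inventory, where usable capacity is inflated by allocation_ratio and shrunk by reserved. A simplified sketch of that test against the inventory dict logged in this run; this is illustrative only, not the ResourceTracker API:

    # Inventory in the shape the report client logs above.
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Usable capacity = (total - reserved) * allocation_ratio.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    def claim(requested, used, inv=INVENTORY):
        """Return True when the request fits, i.e. a successful claim."""
        cap = capacity(inv)
        return all(used.get(rc, 0) + amount <= cap[rc]
                   for rc, amount in requested.items())

    # m1.nano from the flavor logged below: 1 vCPU, 192 MB RAM, 1 GB disk.
    assert claim({'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}, used={})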
[ 1998.305249] env[62875]: DEBUG nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1998.305943] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e96d7a-ac84-433c-b32c-c13c0f4f44c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.443134] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Releasing lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1998.443574] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1998.443768] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1998.444514] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2cc5bf0-f30e-43f5-aee7-1ba45ba10fa0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.453499] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e01358b-d74e-4921-84ec-6432636c19d1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.476243] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738 could not be found. [ 1998.476463] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.476648] env[62875]: INFO nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Took 0.03 seconds to destroy the instance on the hypervisor. 
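The WARNING above explains the 0.03-second destroy: the VM was never created in vCenter, so the backend lookup raises InstanceNotFound and vmops treats the instance as already gone instead of failing the teardown, letting network and claim cleanup proceed. A sketch of that tolerant-destroy shape, with hypothetical find_vm/unregister_vm/delete_files helpers standing in for the FindAllByUuid, UnregisterVM and DeleteDatastoreFile_Task calls seen elsewhere in this log:

    class InstanceNotFound(Exception):
        """Stands in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, find_vm, unregister_vm, delete_files):
        """Destroy a VM, treating 'never existed on the backend' as success."""
        try:
            vm_ref = find_vm(instance_uuid)
        except InstanceNotFound:
            # Same outcome as the WARNING above: nothing on the backend,
            # so report the instance destroyed and keep tearing down.
            print(f"Instance does not exist on backend: {instance_uuid}")
            return
        unregister_vm(vm_ref)
        delete_files(vm_ref)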
[ 1998.476888] env[62875]: DEBUG oslo.service.loopingcall [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.477133] env[62875]: DEBUG nova.compute.manager [-] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1998.477230] env[62875]: DEBUG nova.network.neutron [-] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1998.500831] env[62875]: DEBUG nova.network.neutron [-] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1998.791101] env[62875]: DEBUG nova.compute.utils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1998.794709] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1998.794878] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1998.820977] env[62875]: INFO nova.compute.manager [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Took 24.39 seconds to build instance.
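The oslo.service.loopingcall record above ("Waiting for function ... _deallocate_network_with_retries to return") wraps the Neutron deallocation in a retry loop so a transient Neutron failure does not leak ports. A stdlib-only sketch of that fixed-interval retry shape; the real code drives this through oslo_service.loopingcall, and the names here are illustrative:

    import time

    def call_with_retries(func, max_retries=3, interval=2.0):
        """Re-invoke func until it succeeds or retries are exhausted."""
        attempt = 0
        while True:
            try:
                return func()
            except Exception as exc:
                attempt += 1
                if attempt > max_retries:
                    raise
                print(f"deallocate failed ({exc}); retry {attempt}/{max_retries}")
                time.sleep(interval)

    # Usage: retry deallocate_for_instance-style work for one instance.
    call_with_retries(lambda: print("deallocate_for_instance()"))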
[ 1998.843260] env[62875]: DEBUG nova.policy [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e79993abf5eb47cc8449e3468d3cdd4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bef7d358bb2746efb448dbf759cac58c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 1999.003721] env[62875]: DEBUG nova.network.neutron [-] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.100650] env[62875]: INFO nova.scheduler.client.report [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Deleted allocations for instance 100c3541-3af3-4d3c-8060-2235f18f51e4 [ 1999.208522] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Successfully created port: 14731902-25d4-4bb5-91c1-caece4a9215f {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1999.295536] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1999.326107] env[62875]: DEBUG oslo_concurrency.lockutils [None req-30abedf0-9f60-4cc5-b0e5-e45fda323442 tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "6f936641-750d-49ae-8beb-bca35305d10d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.118s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.507031] env[62875]: INFO nova.compute.manager [-] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Took 1.03 seconds to deallocate network for instance. 
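The nova.policy record above is expected noise for a tempest member user: allocate_for_instance probes the network:attach_external_network rule with the request credentials, and a token carrying only ['member', 'reader'] roles fails the admin-only default, so the port is created on a project network instead. A minimal sketch of that check, assuming a plain rule function rather than the real oslo.policy Enforcer:

    def authorize_attach_external_network(creds):
        """Default rule for network:attach_external_network is admin-only."""
        return creds.get('is_admin', False)

    creds = {
        'is_admin': False,
        'roles': ['member', 'reader'],
        'project_id': 'bef7d358bb2746efb448dbf759cac58c',
    }

    if not authorize_attach_external_network(creds):
        # Matches the DEBUG line above: the check fails and Nova simply
        # does not request an external network for the port.
        print("Policy check for network:attach_external_network failed")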
[ 1999.511070] env[62875]: DEBUG nova.compute.claims [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1999.511293] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.558155] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99905ee-29a0-428d-b925-4f218dc95c02 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.565906] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1a776b-cbdc-4c76-97ac-7e5a35cc741a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.597403] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe82252-56a7-4f03-b2d2-74741471b1ff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.603186] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c30a06f-0539-462c-8dec-82d051e13ac5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.620451] env[62875]: DEBUG oslo_concurrency.lockutils [None req-616de360-b79e-47cd-872b-c6c51177212e tempest-ServerActionsTestOtherA-961067021 tempest-ServerActionsTestOtherA-961067021-project-member] Lock "100c3541-3af3-4d3c-8060-2235f18f51e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.398s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.621133] env[62875]: DEBUG nova.compute.provider_tree [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1999.813536] env[62875]: INFO nova.compute.manager [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Rebuilding instance [ 1999.828823] env[62875]: DEBUG nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1999.859749] env[62875]: DEBUG nova.compute.manager [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1999.860650] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d60425-3e18-43aa-bde7-fba0ccb56a30 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.126967] env[62875]: DEBUG nova.scheduler.client.report [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2000.130276] env[62875]: DEBUG nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2000.271028] env[62875]: DEBUG nova.compute.manager [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Received event network-changed-14731902-25d4-4bb5-91c1-caece4a9215f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2000.271565] env[62875]: DEBUG nova.compute.manager [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Refreshing instance network info cache due to event network-changed-14731902-25d4-4bb5-91c1-caece4a9215f. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2000.271565] env[62875]: DEBUG oslo_concurrency.lockutils [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] Acquiring lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.271565] env[62875]: DEBUG oslo_concurrency.lockutils [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] Acquired lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2000.271980] env[62875]: DEBUG nova.network.neutron [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Refreshing network info cache for port 14731902-25d4-4bb5-91c1-caece4a9215f {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2000.308062] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2000.331249] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2000.331404] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2000.331553] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2000.331731] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2000.331877] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 
tempest-ImagesTestJSON-2014349889-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2000.332035] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2000.332249] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2000.332410] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2000.332576] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2000.332735] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2000.332904] env[62875]: DEBUG nova.virt.hardware [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2000.333783] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b47ed0c-05a1-4367-8457-397da6cc5242 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.346457] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ece0281-d0fa-4130-be63-282607d96cfe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.351368] env[62875]: ERROR nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. 
[ 2000.351368] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 2000.351368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2000.351368] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 2000.351368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2000.351368] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 2000.351368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2000.351368] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 2000.351368] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2000.351368] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 2000.351368] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2000.351368] env[62875]: ERROR nova.compute.manager raise self.value [ 2000.351368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2000.351368] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 2000.351368] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2000.351368] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 2000.351907] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2000.351907] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 2000.351907] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. 
[ 2000.351907] env[62875]: ERROR nova.compute.manager [ 2000.351907] env[62875]: Traceback (most recent call last): [ 2000.351907] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 2000.351907] env[62875]: listener.cb(fileno) [ 2000.351907] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2000.351907] env[62875]: result = function(*args, **kwargs) [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2000.351907] env[62875]: return func(*args, **kwargs) [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2000.351907] env[62875]: raise e [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2000.351907] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2000.351907] env[62875]: created_port_ids = self._update_ports_for_instance( [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2000.351907] env[62875]: with excutils.save_and_reraise_exception(): [ 2000.351907] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2000.351907] env[62875]: self.force_reraise() [ 2000.351907] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2000.351907] env[62875]: raise self.value [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2000.351907] env[62875]: updated_port = self._update_port( [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2000.351907] env[62875]: _ensure_no_port_binding_failure(port) [ 2000.351907] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2000.351907] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 2000.352885] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. [ 2000.352885] env[62875]: Removing descriptor: 18 [ 2000.361968] env[62875]: ERROR nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. 
[ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Traceback (most recent call last): [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] yield resources [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self.driver.spawn(context, instance, image_meta, [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] vm_ref = self.build_virtual_machine(instance, [ 2000.361968] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] vif_infos = vmwarevif.get_vif_info(self._session, [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] for vif in network_info: [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] return self._sync_wrapper(fn, *args, **kwargs) [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self.wait() [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self[:] = self._gt.wait() [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] return self._exit_event.wait() [ 2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
2000.362401] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] current.throw(*self._exc) [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] result = function(*args, **kwargs) [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] return func(*args, **kwargs) [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] raise e [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] nwinfo = self.network_api.allocate_for_instance( [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] created_port_ids = self._update_ports_for_instance( [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] with excutils.save_and_reraise_exception(): [ 2000.362838] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self.force_reraise() [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] raise self.value [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] updated_port = self._update_port( [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] _ensure_no_port_binding_failure(port) [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] raise exception.PortBindingFailed(port_id=port['id']) [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] nova.exception.PortBindingFailed: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. [ 2000.363347] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] [ 2000.363347] env[62875]: INFO nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Terminating instance [ 2000.364329] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.633707] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.634328] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2000.640360] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.685s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.645530] env[62875]: INFO nova.compute.claims [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2000.669372] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.792207] env[62875]: DEBUG nova.network.neutron [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2000.867153] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.876065] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2000.877414] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-acd32aa5-6435-4e3e-b5a5-31f1810739df {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.885337] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2000.885337] env[62875]: value = "task-2179948" [ 2000.885337] env[62875]: _type = "Task" [ 2000.885337] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.893294] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.931033] env[62875]: DEBUG nova.network.neutron [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.150394] env[62875]: DEBUG nova.compute.utils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2001.151752] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2001.151923] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2001.200998] env[62875]: DEBUG nova.policy [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c873183130d4f6cb6537323952c696b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e549c6f06ab945678c40795f872b319f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2001.395349] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179948, 'name': PowerOffVM_Task, 'duration_secs': 0.091499} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.396037] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2001.396369] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2001.397334] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4dc2f8-a78c-4f8c-8495-1b0a12ad8240 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.405323] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2001.405672] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4c8f290-32c5-45d5-b066-e8dbbabdc34d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.430889] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2001.431122] env[62875]: DEBUG 
nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2001.431309] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Deleting the datastore file [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2001.431568] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43faa406-6ef2-4589-ac78-5dbc12992da7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.434392] env[62875]: DEBUG oslo_concurrency.lockutils [req-c55a7fc8-ef56-4013-8f28-0f114e503d7f req-ab329d39-3f84-474c-b5e2-a292be383651 service nova] Releasing lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2001.434810] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2001.435209] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2001.442254] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2001.442254] env[62875]: value = "task-2179950" [ 2001.442254] env[62875]: _type = "Task" [ 2001.442254] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.453224] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.657726] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2001.951622] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109285} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.951898] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2001.952928] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2001.953090] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2001.960367] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2001.982570] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658908b2-ce80-442a-9f5e-759010a561fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.989862] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Successfully created port: 82cc5921-b44c-4ec7-8717-1f8dd3435cca {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2001.991941] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf0d1cf-9e61-4362-8680-1b6c37b6b815 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.026734] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717a26b3-4256-4560-84a3-2ea786641cbd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.035065] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878b0f6d-9f4c-423e-bf08-5c5ce9ee685c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.048236] env[62875]: DEBUG nova.compute.provider_tree [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2002.065373] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 
41ec8810-3f17-4f59-9828-a4a2e873eab4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.298754] env[62875]: DEBUG nova.compute.manager [req-a04ef1fd-ee09-44ef-97b1-85e575f9527d req-624ede62-60f6-4f22-8046-4eaaca98eae0 service nova] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Received event network-vif-deleted-14731902-25d4-4bb5-91c1-caece4a9215f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2002.551321] env[62875]: DEBUG nova.scheduler.client.report [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2002.567139] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.567670] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2002.567785] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2002.568241] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f2ee9f5-343d-47c6-8be9-6ed9f718c33a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.578488] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dce317-c67c-4e65-9b7c-63e99637b029 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.600352] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 41ec8810-3f17-4f59-9828-a4a2e873eab4 could not be found. 
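The PortBindingFailed traceback earlier in this run shows the same pattern twice: _update_ports_for_instance runs inside excutils.save_and_reraise_exception(), so the binding error can trigger cleanup in the with-block and still propagate unchanged up to _allocate_network_async, which re-raises it into the spawn path. A stdlib sketch of that context-manager behaviour (simplified; the real oslo.utils helper also supports suppressing the re-raise):

    import contextlib

    @contextlib.contextmanager
    def save_and_reraise_exception():
        """Run cleanup on failure, then re-raise the original exception."""
        try:
            yield
        except Exception:
            print("rolling back created ports")  # stand-in cleanup step
            raise  # bare raise preserves the original traceback

    class PortBindingFailed(Exception):
        pass

    def update_ports():
        with save_and_reraise_exception():
            raise PortBindingFailed(
                "Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f")

    try:
        update_ports()
    except PortBindingFailed as exc:
        print(f"propagated unchanged: {exc}")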
[ 2002.600594] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2002.600780] env[62875]: INFO nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2002.601035] env[62875]: DEBUG oslo.service.loopingcall [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2002.601265] env[62875]: DEBUG nova.compute.manager [-] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2002.601360] env[62875]: DEBUG nova.network.neutron [-] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2002.618650] env[62875]: DEBUG nova.network.neutron [-] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2002.670028] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2002.699365] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2002.699620] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2002.699778] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2002.699962] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2002.700165] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2002.700333] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2002.700539] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2002.700697] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2002.700864] env[62875]: DEBUG 
nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2002.701036] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2002.701221] env[62875]: DEBUG nova.virt.hardware [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2002.702074] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e2470e-8fe9-44e1-9664-28184a92a2f4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.710034] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305f9899-4fc8-4ffd-893d-7a5c50615891 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.983427] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2002.983733] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2002.983733] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2002.983902] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2002.984122] env[62875]: DEBUG 
nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2002.984377] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2002.984591] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2002.984751] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2002.984914] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2002.985094] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2002.985269] env[62875]: DEBUG nova.virt.hardware [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2002.986202] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2206398-9071-41fd-9da9-5e9ecb4e8da3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.994709] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e0d7b8-6c9c-4564-a72d-408eab60c5ff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.009622] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2003.016566] env[62875]: DEBUG oslo.service.loopingcall [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2003.016799] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2003.017225] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72af0489-8366-4e15-9135-658f07fca4e7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.035019] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2003.035019] env[62875]: value = "task-2179951" [ 2003.035019] env[62875]: _type = "Task" [ 2003.035019] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.044765] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179951, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.055766] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.056324] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2003.059389] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.109s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.063202] env[62875]: INFO nova.compute.claims [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2003.121297] env[62875]: DEBUG nova.network.neutron [-] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.494229] env[62875]: ERROR nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. 
[ 2003.494229] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 2003.494229] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2003.494229] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 2003.494229] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2003.494229] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 2003.494229] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2003.494229] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 2003.494229] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2003.494229] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 2003.494229] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2003.494229] env[62875]: ERROR nova.compute.manager raise self.value [ 2003.494229] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2003.494229] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 2003.494229] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2003.494229] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 2003.494736] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2003.494736] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 2003.494736] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. 
[ 2003.494736] env[62875]: ERROR nova.compute.manager [ 2003.494736] env[62875]: Traceback (most recent call last): [ 2003.494736] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 2003.494736] env[62875]: listener.cb(fileno) [ 2003.494736] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2003.494736] env[62875]: result = function(*args, **kwargs) [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2003.494736] env[62875]: return func(*args, **kwargs) [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2003.494736] env[62875]: raise e [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2003.494736] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2003.494736] env[62875]: created_port_ids = self._update_ports_for_instance( [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2003.494736] env[62875]: with excutils.save_and_reraise_exception(): [ 2003.494736] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2003.494736] env[62875]: self.force_reraise() [ 2003.494736] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2003.494736] env[62875]: raise self.value [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2003.494736] env[62875]: updated_port = self._update_port( [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2003.494736] env[62875]: _ensure_no_port_binding_failure(port) [ 2003.494736] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2003.494736] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 2003.498160] env[62875]: nova.exception.PortBindingFailed: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. [ 2003.498160] env[62875]: Removing descriptor: 18 [ 2003.498160] env[62875]: ERROR nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. 
[ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Traceback (most recent call last): [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] yield resources [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self.driver.spawn(context, instance, image_meta, [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2003.498160] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] vm_ref = self.build_virtual_machine(instance, [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] vif_infos = vmwarevif.get_vif_info(self._session, [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] for vif in network_info: [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return self._sync_wrapper(fn, *args, **kwargs) [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self.wait() [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self[:] = self._gt.wait() [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return self._exit_event.wait() [ 2003.498599] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] result = hub.switch() [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return self.greenlet.switch() [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] result = function(*args, **kwargs) [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return func(*args, **kwargs) [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] raise e [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] nwinfo = self.network_api.allocate_for_instance( [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2003.498970] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] created_port_ids = self._update_ports_for_instance( [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] with excutils.save_and_reraise_exception(): [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self.force_reraise() [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] raise self.value [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] updated_port = self._update_port( [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] _ensure_no_port_binding_failure(port) [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2003.499438] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] raise exception.PortBindingFailed(port_id=port['id']) [ 2003.499781] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] nova.exception.PortBindingFailed: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. [ 2003.499781] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] [ 2003.499781] env[62875]: INFO nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Terminating instance [ 2003.544317] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179951, 'name': CreateVM_Task, 'duration_secs': 0.255703} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.544543] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2003.544882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.545056] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.545393] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2003.545792] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-180c690e-e0d7-4948-bc7d-04540f9a3954 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.549880] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2003.549880] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5272bfc9-67fc-050f-b0c5-b2f8a7389bd6" [ 2003.549880] env[62875]: _type = "Task" [ 2003.549880] env[62875]: } to 
complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.557759] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5272bfc9-67fc-050f-b0c5-b2f8a7389bd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.572286] env[62875]: DEBUG nova.compute.utils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2003.575784] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2003.575784] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2003.625251] env[62875]: INFO nova.compute.manager [-] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Took 1.02 seconds to deallocate network for instance. [ 2003.627463] env[62875]: DEBUG nova.compute.claims [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2003.627650] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.630263] env[62875]: DEBUG nova.policy [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c17bfe9ca8d44c34b335e18c7aa3a583', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '816bd85d289047e38d3a7f169e61b5e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2004.003146] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Successfully created port: e046947c-decf-4f1d-b15d-bd447d3ebc74 {{(pid=62875) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 2004.003146] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Acquiring lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.003146] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Acquired lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.003146] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2004.063020] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5272bfc9-67fc-050f-b0c5-b2f8a7389bd6, 'name': SearchDatastore_Task, 'duration_secs': 0.007793} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.063020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.063020] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2004.063020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.063352] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.063352] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2004.063352] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d19f4933-a88e-458a-9b34-473d0eef735e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.073020] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2004.073020] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2004.073020] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37a7287c-f41e-4c0c-8073-b518b19e33c6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.079133] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2004.084070] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2004.084070] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b0652a-fe23-40db-ca4b-923044304983" [ 2004.084070] env[62875]: _type = "Task" [ 2004.084070] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.095758] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b0652a-fe23-40db-ca4b-923044304983, 'name': SearchDatastore_Task, 'duration_secs': 0.00809} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.096694] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a89b2fe6-d17e-4cd9-b24d-54fd4eb5956e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.104109] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2004.104109] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fef058-1d16-6091-34ab-7b590d65c3f4" [ 2004.104109] env[62875]: _type = "Task" [ 2004.104109] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.114159] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fef058-1d16-6091-34ab-7b590d65c3f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.343165] env[62875]: DEBUG nova.compute.manager [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Received event network-changed-82cc5921-b44c-4ec7-8717-1f8dd3435cca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2004.343391] env[62875]: DEBUG nova.compute.manager [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Refreshing instance network info cache due to event network-changed-82cc5921-b44c-4ec7-8717-1f8dd3435cca. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2004.343809] env[62875]: DEBUG oslo_concurrency.lockutils [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] Acquiring lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.410643] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d612ed9-eeb3-4072-9675-99048d6bc84c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.418534] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444c9210-4eed-4b6d-94a2-a2bc17c2a717 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.450729] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f7db2b-3b8a-4750-bc4d-e51a47dd7ee5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.460151] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca795d9-41ff-4518-98b0-12b16f2b3637 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.473012] env[62875]: DEBUG nova.compute.provider_tree [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2004.526532] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2004.627036] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fef058-1d16-6091-34ab-7b590d65c3f4, 'name': SearchDatastore_Task, 'duration_secs': 0.008606} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.627036] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.627036] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2004.627036] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe57e4b9-1012-43e1-a6b0-78d35d1bdda4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.635963] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2004.635963] env[62875]: value = "task-2179952" [ 2004.635963] env[62875]: _type = "Task" [ 2004.635963] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.645589] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179952, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.667090] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.976977] env[62875]: DEBUG nova.scheduler.client.report [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2005.093044] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2005.123707] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T05:12:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='8f9e9086-496c-42f3-b0bf-4a3eb559ebba',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-841487995',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2005.123939] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2005.124109] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2005.124957] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 
tempest-MigrationsAdminTest-1593535992-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2005.124957] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2005.124957] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2005.124957] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2005.124957] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2005.125311] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2005.125311] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2005.125994] env[62875]: DEBUG nova.virt.hardware [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2005.126320] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2336d1a-3269-4864-bc05-0b4b28830c48 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.134387] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593b2801-cd35-44bc-bf99-c5df008f765b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.159168] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477688} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.159630] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2005.159736] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2005.159900] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04e69b91-2a81-484b-934e-a872b83616e6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.167270] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2005.167270] env[62875]: value = "task-2179953" [ 2005.167270] env[62875]: _type = "Task" [ 2005.167270] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.171274] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Releasing lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.171760] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2005.171947] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2005.172477] env[62875]: DEBUG oslo_concurrency.lockutils [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] Acquired lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.172698] env[62875]: DEBUG nova.network.neutron [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Refreshing network info cache for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2005.174580] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fc5d451-70b1-4771-9849-90a1a899cc32 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.183032] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179953, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.189130] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811aa4d1-6469-4143-8c63-f3a2576b3cd3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.215448] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5cf1f620-d0da-4e81-8d1f-e881c47dcad1 could not be found. [ 2005.215945] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2005.216152] env[62875]: INFO nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2005.216430] env[62875]: DEBUG oslo.service.loopingcall [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2005.216687] env[62875]: DEBUG nova.compute.manager [-] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2005.216781] env[62875]: DEBUG nova.network.neutron [-] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2005.235434] env[62875]: DEBUG nova.network.neutron [-] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2005.260181] env[62875]: ERROR nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. [ 2005.260181] env[62875]: ERROR nova.compute.manager Traceback (most recent call last): [ 2005.260181] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2005.260181] env[62875]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 2005.260181] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2005.260181] env[62875]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 2005.260181] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2005.260181] env[62875]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 2005.260181] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2005.260181] env[62875]: ERROR nova.compute.manager self.force_reraise() [ 2005.260181] env[62875]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2005.260181] env[62875]: ERROR nova.compute.manager raise self.value [ 2005.260181] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2005.260181] env[62875]: ERROR nova.compute.manager updated_port = self._update_port( [ 2005.260181] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2005.260181] env[62875]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 2005.260765] env[62875]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2005.260765] env[62875]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 2005.260765] env[62875]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. 
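The traceback above (and the duplicate that follows) funnels through oslo.utils' save_and_reraise_exception context manager: the port update raises PortBindingFailed, cleanup runs, and force_reraise() re-raises the original exception -- the "raise self.value" frames from excutils.py in the middle of the Nova traceback come from exactly this. A minimal sketch of that pattern, assuming only oslo.utils is installed; the exception class and helpers here are simplified stand-ins, not Nova's actual code:

from oslo_utils import excutils

class PortBindingFailed(Exception):
    pass

def _update_port(port_id):
    # Stand-in for nova.network.neutron.API._update_port: Neutron reported
    # a failed binding for the port, so the update raises.
    raise PortBindingFailed(f"Binding failed for port {port_id}")

def _update_ports_for_instance(port_ids):
    created = []
    try:
        for port_id in port_ids:
            created.append(_update_port(port_id))
    except Exception:
        # Cleanup runs inside the context manager; on exit, force_reraise()
        # re-raises the *original* exception with its traceback intact, so
        # the caller still sees PortBindingFailed rather than any cleanup
        # error.
        with excutils.save_and_reraise_exception():
            created.clear()  # hypothetical rollback stand-in
    return created
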
[ 2005.260765] env[62875]: ERROR nova.compute.manager [ 2005.260765] env[62875]: Traceback (most recent call last): [ 2005.260765] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 2005.260765] env[62875]: listener.cb(fileno) [ 2005.260765] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2005.260765] env[62875]: result = function(*args, **kwargs) [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2005.260765] env[62875]: return func(*args, **kwargs) [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2005.260765] env[62875]: raise e [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2005.260765] env[62875]: nwinfo = self.network_api.allocate_for_instance( [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2005.260765] env[62875]: created_port_ids = self._update_ports_for_instance( [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2005.260765] env[62875]: with excutils.save_and_reraise_exception(): [ 2005.260765] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2005.260765] env[62875]: self.force_reraise() [ 2005.260765] env[62875]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2005.260765] env[62875]: raise self.value [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2005.260765] env[62875]: updated_port = self._update_port( [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2005.260765] env[62875]: _ensure_no_port_binding_failure(port) [ 2005.260765] env[62875]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2005.260765] env[62875]: raise exception.PortBindingFailed(port_id=port['id']) [ 2005.261829] env[62875]: nova.exception.PortBindingFailed: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. [ 2005.261829] env[62875]: Removing descriptor: 18 [ 2005.261829] env[62875]: ERROR nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. 
[ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Traceback (most recent call last): [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/compute/manager.py", line 2901, in _build_resources [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] yield resources [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self.driver.spawn(context, instance, image_meta, [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2005.261829] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] vm_ref = self.build_virtual_machine(instance, [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] for vif in network_info: [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return self._sync_wrapper(fn, *args, **kwargs) [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self.wait() [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self[:] = self._gt.wait() [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return self._exit_event.wait() [ 2005.262227] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] result = hub.switch() [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return self.greenlet.switch() [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] result = function(*args, **kwargs) [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return func(*args, **kwargs) [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] raise e [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] nwinfo = self.network_api.allocate_for_instance( [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2005.262700] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] created_port_ids = self._update_ports_for_instance( [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] with excutils.save_and_reraise_exception(): [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self.force_reraise() [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] raise self.value [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] updated_port = self._update_port( [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] _ensure_no_port_binding_failure(port) [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2005.263149] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] raise exception.PortBindingFailed(port_id=port['id']) [ 2005.263537] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] nova.exception.PortBindingFailed: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. [ 2005.263537] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] [ 2005.263537] env[62875]: INFO nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Terminating instance [ 2005.484933] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.485585] env[62875]: DEBUG nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2005.488802] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.615s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.490222] env[62875]: INFO nova.compute.claims [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2005.684328] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079742} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.684328] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2005.684918] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c0df93-d91c-4b36-b281-c90a63cce391 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.704945] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2005.705881] env[62875]: DEBUG nova.network.neutron [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2005.707655] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13917626-59c5-49d8-bf15-a83d22a9ac42 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.727210] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2005.727210] env[62875]: value = "task-2179954" [ 2005.727210] env[62875]: _type = "Task" [ 2005.727210] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.739019] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179954, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.739019] env[62875]: DEBUG nova.network.neutron [-] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.765842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.766144] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquired lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.767079] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2005.998366] env[62875]: DEBUG nova.compute.utils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2005.999922] env[62875]: DEBUG nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2006.000110] env[62875]: DEBUG nova.network.neutron [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2006.027228] env[62875]: DEBUG nova.network.neutron [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.051625] env[62875]: DEBUG nova.policy [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd435c7b3cc8649d2a2e6cba7aa79255f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9e631a043ef4ed9ae37c18a142afa38', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2006.238318] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179954, 'name': ReconfigVM_Task, 'duration_secs': 0.316026} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.238598] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2006.239231] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7b5b982-6f58-4872-b1ea-e29e261b8ae0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.241755] env[62875]: INFO nova.compute.manager [-] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Took 1.02 seconds to deallocate network for instance. 
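The "Waiting for the task: (returnval){ value = "task-..." }" blocks and the "progress is N%." / "completed successfully" lines around them are oslo.vmware's task-polling loop. A rough sketch of how a caller drives it, assuming a reachable vCenter; the endpoint, credentials, datastore path and datacenter reference below are placeholders, and the exact constructor keywords should be checked against the installed oslo.vmware:

from oslo_vmware import api

# Placeholder endpoint and credentials -- illustrative only.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# An asynchronous vSphere call returns a Task managed-object reference
# (e.g. task-2179953 above) instead of a result.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                          name='[datastore2] example/example.vmdk',
                          datacenter=None,  # a real Datacenter moref in Nova
                          newCapacityKb=1048576, eagerZero=False)

# wait_for_task() polls the TaskInfo, logging "progress is N%." each cycle,
# returns the TaskInfo once the task reaches 'success' ("completed
# successfully" above), and raises if it ends in 'error'.
task_info = session.wait_for_task(task)
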
[ 2006.246030] env[62875]: DEBUG nova.compute.claims [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2006.246030] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.247249] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2006.247249] env[62875]: value = "task-2179955" [ 2006.247249] env[62875]: _type = "Task" [ 2006.247249] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.256809] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179955, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.289999] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2006.367063] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.406781] env[62875]: DEBUG nova.network.neutron [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Successfully created port: 6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2006.504418] env[62875]: DEBUG nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2006.533446] env[62875]: DEBUG oslo_concurrency.lockutils [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] Releasing lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.533446] env[62875]: DEBUG nova.compute.manager [req-5a8693e4-1f80-4860-b2a7-caaef90e7439 req-86f0d74f-d3e5-4900-b1e8-e1058defda7a service nova] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Received event network-vif-deleted-82cc5921-b44c-4ec7-8717-1f8dd3435cca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2006.581023] env[62875]: DEBUG nova.compute.manager [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Received event network-changed-e046947c-decf-4f1d-b15d-bd447d3ebc74 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2006.581023] env[62875]: DEBUG nova.compute.manager [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Refreshing instance network info cache due to event network-changed-e046947c-decf-4f1d-b15d-bd447d3ebc74. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2006.581023] env[62875]: DEBUG oslo_concurrency.lockutils [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] Acquiring lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.766066] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179955, 'name': Rename_Task, 'duration_secs': 0.147018} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.766401] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2006.766919] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27ebd64d-68b9-4744-afe8-ac21d0d73337 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.776341] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2006.776341] env[62875]: value = "task-2179956" [ 2006.776341] env[62875]: _type = "Task" [ 2006.776341] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.787469] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "c217e435-c5d8-406b-99ee-ec71580fb344" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.787693] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "c217e435-c5d8-406b-99ee-ec71580fb344" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.788880] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.867566] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beef9adc-f6f3-4b9e-a225-e7f3ba3d7887 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.870727] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Releasing lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.871176] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2006.871409] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2006.871747] env[62875]: DEBUG oslo_concurrency.lockutils [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] Acquired lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.871966] env[62875]: DEBUG nova.network.neutron [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Refreshing network info cache for port e046947c-decf-4f1d-b15d-bd447d3ebc74 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2006.873039] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e176aa7c-5c95-4b6c-af9a-c172642e2261 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.880522] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77637508-4ba4-47e5-88f3-8edd25a2d98a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.886613] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20af44e2-a0a3-47c0-a25d-a6e4d0b2e101 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.933949] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06673aa1-734c-4163-b26f-81d0613ce591 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.937163] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fd7ba11a-18d1-4f96-a445-eedce740b0c3 could not be found. [ 2006.937424] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2006.937622] env[62875]: INFO nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Took 0.07 seconds to destroy the instance on the hypervisor. [ 2006.937866] env[62875]: DEBUG oslo.service.loopingcall [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2006.938388] env[62875]: DEBUG nova.compute.manager [-] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2006.938489] env[62875]: DEBUG nova.network.neutron [-] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2006.945467] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7c1f30-0817-4072-8e2e-3be86c88f4fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.959574] env[62875]: DEBUG nova.compute.provider_tree [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2006.961332] env[62875]: DEBUG nova.network.neutron [-] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2007.286823] env[62875]: DEBUG oslo_vmware.api [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179956, 'name': PowerOnVM_Task, 'duration_secs': 0.395955} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.287126] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2007.287352] env[62875]: DEBUG nova.compute.manager [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2007.288140] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed583517-eb2f-4681-99d7-1c0a78a62e1d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.393353] env[62875]: DEBUG nova.network.neutron [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2007.464200] env[62875]: DEBUG nova.scheduler.client.report [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2007.467060] env[62875]: DEBUG nova.network.neutron [-] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.472018] env[62875]: DEBUG nova.network.neutron [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.514751] env[62875]: DEBUG nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2007.541595] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2007.541833] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2007.543860] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 2007.543860] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2007.543860] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2007.543860] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2007.543860] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2007.544122] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2007.544122] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2007.544122] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2007.544122] env[62875]: DEBUG nova.virt.hardware [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2007.544275] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b09c11-f666-4097-afc6-643f86ac2898 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.554353] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a830549-942f-47e9-99fb-8c9072041027 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.806432] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f 
tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.909913] env[62875]: DEBUG nova.compute.manager [req-713f78a5-a764-4dc5-a81d-8f4bd524fc19 req-6aa3f0fd-8a25-4083-b541-2e4264c3f239 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Received event network-vif-plugged-6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2007.910119] env[62875]: DEBUG oslo_concurrency.lockutils [req-713f78a5-a764-4dc5-a81d-8f4bd524fc19 req-6aa3f0fd-8a25-4083-b541-2e4264c3f239 service nova] Acquiring lock "7969485a-ccd6-48e0-bdea-b8920af28843-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.911198] env[62875]: DEBUG oslo_concurrency.lockutils [req-713f78a5-a764-4dc5-a81d-8f4bd524fc19 req-6aa3f0fd-8a25-4083-b541-2e4264c3f239 service nova] Lock "7969485a-ccd6-48e0-bdea-b8920af28843-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.911198] env[62875]: DEBUG oslo_concurrency.lockutils [req-713f78a5-a764-4dc5-a81d-8f4bd524fc19 req-6aa3f0fd-8a25-4083-b541-2e4264c3f239 service nova] Lock "7969485a-ccd6-48e0-bdea-b8920af28843-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.911198] env[62875]: DEBUG nova.compute.manager [req-713f78a5-a764-4dc5-a81d-8f4bd524fc19 req-6aa3f0fd-8a25-4083-b541-2e4264c3f239 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] No waiting events found dispatching network-vif-plugged-6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2007.911198] env[62875]: WARNING nova.compute.manager [req-713f78a5-a764-4dc5-a81d-8f4bd524fc19 req-6aa3f0fd-8a25-4083-b541-2e4264c3f239 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Received unexpected event network-vif-plugged-6c87f73d-311a-4ed3-9d9f-5325a201e67e for instance with vm_state building and task_state spawning. [ 2007.970706] env[62875]: INFO nova.compute.manager [-] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Took 1.03 seconds to deallocate network for instance. 
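The "Acquiring lock ... by ..." / "Lock ... acquired ... :: waited Ns" / "Lock ... \"released\" ... :: held Ns" triplets throughout this log come from oslo.concurrency's lock decorator: Nova wraps methods such as instance_claim and _pop_event with a named lock, and the decorator's inner wrapper logs how long each caller waited for and then held the lock. A minimal sketch using the plain lockutils API (the lock name matches the log; the body is an illustrative stand-in):

import time
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # While this body runs, any other caller decorated with the same lock
    # name blocks and its "waited" time accumulates; on return the wrapper
    # logs the "held" time, producing the triplets seen above.
    time.sleep(0.01)  # stand-in for resource-tracker bookkeeping
    return instance_uuid

abort_instance_claim('fd7ba11a-18d1-4f96-a445-eedce740b0c3')
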
[ 2007.971459] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.972584] env[62875]: DEBUG oslo_concurrency.lockutils [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] Releasing lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.972805] env[62875]: DEBUG nova.compute.manager [req-1905fa89-396f-42fa-a369-fcfeb80edf3e req-6d3aa9ce-92b3-4671-b0dc-01f2ec93601c service nova] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Received event network-vif-deleted-e046947c-decf-4f1d-b15d-bd447d3ebc74 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2007.974471] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.936s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.975962] env[62875]: DEBUG nova.compute.claims [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Aborting claim: {{(pid=62875) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2007.976150] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.003735] env[62875]: DEBUG nova.network.neutron [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Successfully updated port: 6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2008.474499] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "49cb9b17-808c-4314-be1e-5b089d9885f6" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.474786] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "49cb9b17-808c-4314-be1e-5b089d9885f6" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2008.506507] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.506664] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquired lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.506836] env[62875]: DEBUG nova.network.neutron [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2008.978106] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "49cb9b17-808c-4314-be1e-5b089d9885f6" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.503s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.978570] env[62875]: DEBUG nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2009.006605] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 77d57f64-9bab-46f1-87b4-62bac5c5d2bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.006798] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 5f190f86-2faa-4b8e-821f-2113577541e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.006927] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 6f936641-750d-49ae-8beb-bca35305d10d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.007059] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.007180] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 41ec8810-3f17-4f59-9828-a4a2e873eab4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.007295] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 5cf1f620-d0da-4e81-8d1f-e881c47dcad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.007408] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance fd7ba11a-18d1-4f96-a445-eedce740b0c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.007520] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.007632] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c6de797f-03f7-4dca-9c6a-e7b840990be6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2009.044985] env[62875]: DEBUG nova.network.neutron [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2009.185709] env[62875]: INFO nova.compute.manager [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Rebuilding instance [ 2009.233754] env[62875]: DEBUG nova.compute.manager [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2009.234624] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5971aa7-f7bb-46ad-963c-4221ead48832 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.354063] env[62875]: DEBUG nova.network.neutron [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updating instance_info_cache with network_info: [{"id": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "address": "fa:16:3e:e9:29:5c", "network": {"id": "3028cdbc-4b41-4102-bd32-59fd93a60bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-125987334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9e631a043ef4ed9ae37c18a142afa38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87f73d-31", "ovs_interfaceid": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.483728] env[62875]: DEBUG nova.compute.utils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2009.485676] env[62875]: DEBUG nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2009.485676] env[62875]: DEBUG nova.network.neutron [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2009.511515] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance acc78084-21e8-456c-a573-fc5e931147c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2009.524977] env[62875]: DEBUG nova.policy [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '232d3ef2eacb4e6da1c639a88d6f2451', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23bd5343b96d403ea842f64e769df52e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2009.813570] env[62875]: DEBUG nova.network.neutron [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Successfully created port: 08671000-5c6e-4076-b739-0d436002843b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2009.858955] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Releasing lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.859145] env[62875]: DEBUG nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Instance network_info: |[{"id": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "address": "fa:16:3e:e9:29:5c", "network": {"id": "3028cdbc-4b41-4102-bd32-59fd93a60bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-125987334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9e631a043ef4ed9ae37c18a142afa38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87f73d-31", "ovs_interfaceid": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2009.861641] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:29:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c87f73d-311a-4ed3-9d9f-5325a201e67e', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2009.870850] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Creating folder: Project (b9e631a043ef4ed9ae37c18a142afa38). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2009.871194] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d062500-a5a4-4aa4-81a9-2e685e63650a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.882661] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Created folder: Project (b9e631a043ef4ed9ae37c18a142afa38) in parent group-v444854. [ 2009.882854] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Creating folder: Instances. Parent ref: group-v444866. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2009.883116] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31426ef7-b470-44f6-9af1-cfc9bb8162be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.892120] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Created folder: Instances in parent group-v444866. [ 2009.892361] env[62875]: DEBUG oslo.service.loopingcall [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2009.892545] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2009.892742] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e56ea41-7f55-4fe9-a13d-0d3bf3654b11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.912736] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2009.912736] env[62875]: value = "task-2179959" [ 2009.912736] env[62875]: _type = "Task" [ 2009.912736] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.921480] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179959, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.941700] env[62875]: DEBUG nova.compute.manager [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Received event network-changed-6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2009.941700] env[62875]: DEBUG nova.compute.manager [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Refreshing instance network info cache due to event network-changed-6c87f73d-311a-4ed3-9d9f-5325a201e67e. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2009.941700] env[62875]: DEBUG oslo_concurrency.lockutils [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] Acquiring lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.941700] env[62875]: DEBUG oslo_concurrency.lockutils [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] Acquired lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.942195] env[62875]: DEBUG nova.network.neutron [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Refreshing network info cache for port 6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2009.988112] env[62875]: DEBUG nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2010.014147] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a64253fe-4ba9-4686-810b-a26a4c29631b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2010.248585] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2010.249881] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffe77a20-06e9-4789-ad6d-90a8ff23d37e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.256067] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2010.256067] env[62875]: value = "task-2179960" [ 2010.256067] env[62875]: _type = "Task" [ 2010.256067] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.263888] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.423063] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179959, 'name': CreateVM_Task, 'duration_secs': 0.312206} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.423063] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2010.429814] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.430055] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.430406] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2010.430662] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c67346b5-688e-4f64-9065-d4fe3e8ba5de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.435879] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2010.435879] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52760e17-74d2-f6fd-bf47-ca60a7aeabf3" [ 2010.435879] env[62875]: _type = "Task" [ 2010.435879] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.443076] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52760e17-74d2-f6fd-bf47-ca60a7aeabf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.518490] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 305aebbe-f983-4826-b8c0-9854458f7d48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2010.767019] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179960, 'name': PowerOffVM_Task, 'duration_secs': 0.130997} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.767019] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2010.767260] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2010.768157] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d1a069-704f-445d-96d7-d622f235575b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.776492] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2010.776744] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-809edfb2-1dad-48e2-9079-3aabff8e1097 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.803557] env[62875]: DEBUG nova.network.neutron [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updated VIF entry in instance network info cache for port 6c87f73d-311a-4ed3-9d9f-5325a201e67e. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2010.803909] env[62875]: DEBUG nova.network.neutron [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updating instance_info_cache with network_info: [{"id": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "address": "fa:16:3e:e9:29:5c", "network": {"id": "3028cdbc-4b41-4102-bd32-59fd93a60bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-125987334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9e631a043ef4ed9ae37c18a142afa38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87f73d-31", "ovs_interfaceid": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2010.808352] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2010.808352] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2010.808352] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Deleting the datastore file [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2010.808352] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f2bf9af-00f8-4321-bede-5e3a19117292 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.813202] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2010.813202] env[62875]: value = "task-2179962" [ 2010.813202] env[62875]: _type = "Task" [ 2010.813202] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.823029] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.947150] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52760e17-74d2-f6fd-bf47-ca60a7aeabf3, 'name': SearchDatastore_Task, 'duration_secs': 0.008244} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.947476] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.947763] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2010.948120] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.948202] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.948356] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2010.948621] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3455678c-2514-437c-94fe-988e70daed9f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.956494] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2010.956673] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2010.957407] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80762fbe-7e4e-45c6-ba87-70d0c35dc9fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.962443] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2010.962443] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52465f36-f581-9d5b-065c-6dc5f0df7a0a" [ 2010.962443] env[62875]: _type = "Task" [ 2010.962443] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.969982] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52465f36-f581-9d5b-065c-6dc5f0df7a0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.003741] env[62875]: DEBUG nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2011.021181] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c1e107cd-5c03-405f-bdae-3281dc4844d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2011.028484] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2011.029122] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2011.029122] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2011.029122] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2011.029306] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2011.030024] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2011.030024] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2011.030024] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2011.030024] env[62875]: DEBUG nova.virt.hardware [None 
req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2011.030318] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2011.030360] env[62875]: DEBUG nova.virt.hardware [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2011.031190] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedda631-a616-4f28-bee1-215ff148702e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.040091] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ebd337-e224-4fae-94e4-edaee83a2034 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.308073] env[62875]: DEBUG oslo_concurrency.lockutils [req-7c16fbea-6276-4dae-a0bf-1fe74e53bb4a req-f88af9df-087a-47c4-bfb7-f9103d48d240 service nova] Releasing lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.323778] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090513} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.324636] env[62875]: DEBUG nova.network.neutron [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Successfully updated port: 08671000-5c6e-4076-b739-0d436002843b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2011.329038] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2011.329038] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2011.329038] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2011.474797] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52465f36-f581-9d5b-065c-6dc5f0df7a0a, 'name': SearchDatastore_Task, 'duration_secs': 0.008363} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.475519] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c9a577e-8d72-4f55-9284-0318940faa62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.480223] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2011.480223] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ed8fa7-9876-a5fc-4eed-60d83b3c4f36" [ 2011.480223] env[62875]: _type = "Task" [ 2011.480223] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.487173] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ed8fa7-9876-a5fc-4eed-60d83b3c4f36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.524435] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 37ae8e69-f953-4846-8a21-fed697ea575a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2011.830027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "refresh_cache-c6de797f-03f7-4dca-9c6a-e7b840990be6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.830027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquired lock "refresh_cache-c6de797f-03f7-4dca-9c6a-e7b840990be6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.830027] env[62875]: DEBUG nova.network.neutron [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2011.990591] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ed8fa7-9876-a5fc-4eed-60d83b3c4f36, 'name': SearchDatastore_Task, 'duration_secs': 0.008913} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.990860] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.991130] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 7969485a-ccd6-48e0-bdea-b8920af28843/7969485a-ccd6-48e0-bdea-b8920af28843.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2011.991390] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13f349ae-c853-4ccb-86d4-a1c252ef744d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.998175] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2011.998175] env[62875]: value = "task-2179963" [ 2011.998175] env[62875]: _type = "Task" [ 2011.998175] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.008297] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.018166] env[62875]: DEBUG nova.compute.manager [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Received event network-vif-plugged-08671000-5c6e-4076-b739-0d436002843b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2012.018364] env[62875]: DEBUG oslo_concurrency.lockutils [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] Acquiring lock "c6de797f-03f7-4dca-9c6a-e7b840990be6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.018544] env[62875]: DEBUG oslo_concurrency.lockutils [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.018692] env[62875]: DEBUG oslo_concurrency.lockutils [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.018862] env[62875]: DEBUG nova.compute.manager [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] No waiting events found dispatching network-vif-plugged-08671000-5c6e-4076-b739-0d436002843b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2012.019048] env[62875]: WARNING nova.compute.manager [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Received unexpected event network-vif-plugged-08671000-5c6e-4076-b739-0d436002843b for instance with vm_state building and task_state spawning. [ 2012.019248] env[62875]: DEBUG nova.compute.manager [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Received event network-changed-08671000-5c6e-4076-b739-0d436002843b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2012.019414] env[62875]: DEBUG nova.compute.manager [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Refreshing instance network info cache due to event network-changed-08671000-5c6e-4076-b739-0d436002843b. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2012.019595] env[62875]: DEBUG oslo_concurrency.lockutils [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] Acquiring lock "refresh_cache-c6de797f-03f7-4dca-9c6a-e7b840990be6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.026888] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d0c4095f-2d78-4055-b568-7e70e7c4c182 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2012.361837] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2012.362137] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2012.362374] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2012.362642] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2012.362806] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2012.363048] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 2012.363347] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2012.363522] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2012.363766] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2012.363959] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2012.364159] env[62875]: DEBUG nova.virt.hardware [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2012.365120] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd53121-e5eb-4f99-b1ce-4636a005fbe5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.369231] env[62875]: DEBUG nova.network.neutron [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2012.374829] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57a2773-a08c-443e-8b24-877f9f5e4928 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.390385] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2012.397183] env[62875]: DEBUG oslo.service.loopingcall [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2012.400069] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2012.400378] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e4c0a72-d1b8-4ee3-ab78-62549d7e95ca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.419063] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2012.419063] env[62875]: value = "task-2179964" [ 2012.419063] env[62875]: _type = "Task" [ 2012.419063] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.427513] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179964, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.508704] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496183} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.511332] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 7969485a-ccd6-48e0-bdea-b8920af28843/7969485a-ccd6-48e0-bdea-b8920af28843.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2012.511575] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2012.511841] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9f6f894-2800-4577-9a72-8d2bd0319e7f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.517643] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2012.517643] env[62875]: value = "task-2179965" [ 2012.517643] env[62875]: _type = "Task" [ 2012.517643] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.524996] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179965, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.529813] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 8361611a-ad16-43ef-94e0-f2e7e9851682 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2012.547317] env[62875]: DEBUG nova.network.neutron [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Updating instance_info_cache with network_info: [{"id": "08671000-5c6e-4076-b739-0d436002843b", "address": "fa:16:3e:ec:1f:34", "network": {"id": "71f732a3-5f05-47c7-88d6-316acc65c694", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-695018177-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23bd5343b96d403ea842f64e769df52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08671000-5c", "ovs_interfaceid": "08671000-5c6e-4076-b739-0d436002843b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.929551] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179964, 'name': CreateVM_Task, 'duration_secs': 0.368227} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.929731] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2012.930189] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.930402] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.930714] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2012.930956] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1597935e-b3b3-4c57-8876-c56ebf27074a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.935322] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2012.935322] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5218588f-6640-ba0f-cb64-8eaa392f2167" [ 2012.935322] env[62875]: _type = "Task" [ 2012.935322] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.943338] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5218588f-6640-ba0f-cb64-8eaa392f2167, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.027253] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059922} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.027760] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2013.028825] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853dd6bd-8851-495d-b99a-39a2a11192ff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.033125] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2106a09b-554e-41dd-aa3a-c190b62d0afc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2013.053593] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 7969485a-ccd6-48e0-bdea-b8920af28843/7969485a-ccd6-48e0-bdea-b8920af28843.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2013.055034] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 816e0ecb-6476-49bb-9fea-a01067f25b51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2013.056486] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Releasing lock "refresh_cache-c6de797f-03f7-4dca-9c6a-e7b840990be6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.056486] env[62875]: DEBUG nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Instance network_info: |[{"id": "08671000-5c6e-4076-b739-0d436002843b", "address": "fa:16:3e:ec:1f:34", "network": {"id": "71f732a3-5f05-47c7-88d6-316acc65c694", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-695018177-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23bd5343b96d403ea842f64e769df52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08671000-5c", "ovs_interfaceid": "08671000-5c6e-4076-b739-0d436002843b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2013.056960] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dc75779-862e-4dd4-94e0-70bfc487a8a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.072160] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2cf54268-5499-49c9-8029-68b3866581d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2013.073604] env[62875]: DEBUG oslo_concurrency.lockutils [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] Acquired lock "refresh_cache-c6de797f-03f7-4dca-9c6a-e7b840990be6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.073604] env[62875]: DEBUG nova.network.neutron [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Refreshing network info cache for port 08671000-5c6e-4076-b739-0d436002843b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2013.074718] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:1f:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08671000-5c6e-4076-b739-0d436002843b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2013.081996] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Creating folder: Project (23bd5343b96d403ea842f64e769df52e). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2013.083607] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9e0aaea6-96cf-494d-9f70-a709a47f9772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2013.086621] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fe82a64-bda5-45e7-9b38-340a64fd4161 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.094756] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2013.094756] env[62875]: value = "task-2179966" [ 2013.094756] env[62875]: _type = "Task" [ 2013.094756] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.098791] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Created folder: Project (23bd5343b96d403ea842f64e769df52e) in parent group-v444854. 
[ 2013.098971] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Creating folder: Instances. Parent ref: group-v444870. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2013.099564] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b53d8480-59dc-498b-b547-cfcc19a8de26 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.104362] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179966, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.112034] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Created folder: Instances in parent group-v444870. [ 2013.112034] env[62875]: DEBUG oslo.service.loopingcall [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2013.112034] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2013.112034] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e94db92-6dbd-4a9d-80b5-ead42ae883fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.133067] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2013.133067] env[62875]: value = "task-2179969" [ 2013.133067] env[62875]: _type = "Task" [ 2013.133067] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.139847] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179969, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.316647] env[62875]: DEBUG nova.network.neutron [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Updated VIF entry in instance network info cache for port 08671000-5c6e-4076-b739-0d436002843b. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2013.316647] env[62875]: DEBUG nova.network.neutron [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Updating instance_info_cache with network_info: [{"id": "08671000-5c6e-4076-b739-0d436002843b", "address": "fa:16:3e:ec:1f:34", "network": {"id": "71f732a3-5f05-47c7-88d6-316acc65c694", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-695018177-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23bd5343b96d403ea842f64e769df52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08671000-5c", "ovs_interfaceid": "08671000-5c6e-4076-b739-0d436002843b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.445828] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5218588f-6640-ba0f-cb64-8eaa392f2167, 'name': SearchDatastore_Task, 'duration_secs': 0.01267} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.446161] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.446430] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2013.446710] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.446889] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.447122] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2013.447409] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da31e1ce-5d5c-475f-aa99-08df8da03826 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.455141] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2013.455531] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2013.456018] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ed017e5-55de-4e10-ad19-f0adf0a1e697 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.461300] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2013.461300] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd90d6-76e0-1c7e-9dce-738f047f40cb" [ 2013.461300] env[62875]: _type = "Task" [ 2013.461300] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.469101] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd90d6-76e0-1c7e-9dce-738f047f40cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.590637] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2013.603842] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179966, 'name': ReconfigVM_Task, 'duration_secs': 0.279175} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.604664] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 7969485a-ccd6-48e0-bdea-b8920af28843/7969485a-ccd6-48e0-bdea-b8920af28843.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2013.605242] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4ecb489-b162-4d42-b092-41b0d1538a45 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.611510] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2013.611510] env[62875]: value = "task-2179970" [ 2013.611510] env[62875]: _type = "Task" [ 2013.611510] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.618895] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179970, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.641550] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179969, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.819634] env[62875]: DEBUG oslo_concurrency.lockutils [req-15ba99b0-1ed5-41f4-9351-d68533cc19ae req-95e63a04-825f-4442-86a6-6b26d18f6d71 service nova] Releasing lock "refresh_cache-c6de797f-03f7-4dca-9c6a-e7b840990be6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.972535] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd90d6-76e0-1c7e-9dce-738f047f40cb, 'name': SearchDatastore_Task, 'duration_secs': 0.009297} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.973447] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-488924d6-8be9-4ec1-b6b8-184fc4eb8cdb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.978542] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2013.978542] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522b7279-30ba-6d55-9eb0-bb84eda59c7b" [ 2013.978542] env[62875]: _type = "Task" [ 2013.978542] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.986137] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522b7279-30ba-6d55-9eb0-bb84eda59c7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.093793] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 70547fbd-7ce8-466e-8abc-b490b8dd6b28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2014.122689] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179970, 'name': Rename_Task, 'duration_secs': 0.171812} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.122963] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2014.123211] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-618bff41-e36b-4f0b-9b56-91ed1acc4c4e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.129626] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2014.129626] env[62875]: value = "task-2179971" [ 2014.129626] env[62875]: _type = "Task" [ 2014.129626] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.138935] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179971, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.143633] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179969, 'name': CreateVM_Task, 'duration_secs': 0.857129} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.143780] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2014.144410] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.144571] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.144873] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2014.145106] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87e37715-3c35-45bd-bf7a-ebd5551728f9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2014.149013] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2014.149013] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52decb4c-c96d-7c36-dc6c-8e401859673a" [ 2014.149013] env[62875]: _type = "Task" [ 2014.149013] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.155966] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52decb4c-c96d-7c36-dc6c-8e401859673a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.489403] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522b7279-30ba-6d55-9eb0-bb84eda59c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.008194} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.489681] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.489935] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2014.490207] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df6ea681-ed65-4982-bda6-3e77d99ba53d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.496532] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2014.496532] env[62875]: value = "task-2179972" [ 2014.496532] env[62875]: _type = "Task" [ 2014.496532] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.504239] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179972, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.597341] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c217e435-c5d8-406b-99ee-ec71580fb344 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2014.597690] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2014.597952] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2014.641266] env[62875]: DEBUG oslo_vmware.api [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2179971, 'name': PowerOnVM_Task, 'duration_secs': 0.429388} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.644071] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2014.644293] env[62875]: INFO nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Took 7.13 seconds to spawn the instance on the hypervisor. [ 2014.644473] env[62875]: DEBUG nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2014.645518] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a090108b-8508-42be-9559-5bbc75e12f6d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.664237] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52decb4c-c96d-7c36-dc6c-8e401859673a, 'name': SearchDatastore_Task, 'duration_secs': 0.009362} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.664604] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.664847] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2014.665100] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.665255] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.665433] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2014.665693] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a32f9a8d-88df-48cf-a65b-697bebfaaf69 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.686026] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2014.686026] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2014.686679] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0057a5d3-757b-4cd2-a0f9-9624eae35960 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.699627] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2014.699627] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e84a66-22f8-6bba-6944-0f578c0dd12e" [ 2014.699627] env[62875]: _type = "Task" [ 2014.699627] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.711948] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e84a66-22f8-6bba-6944-0f578c0dd12e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.910559] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88be3489-ea84-496a-a24f-be91dad1b950 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.920623] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82500f8-3a47-40ad-801c-0205d5e39372 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.954036] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f03e2af-64ed-4a76-8064-140ba6922342 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.961943] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57f866e-fbbf-4e0f-90a3-bd607e9e6643 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.976043] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2015.006779] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179972, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462113} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.007063] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2015.007312] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2015.007521] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-013db81e-17f7-4ec3-8ee5-c6584fecdc76 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.014168] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2015.014168] env[62875]: value = "task-2179973" [ 2015.014168] env[62875]: _type = "Task" [ 2015.014168] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.022161] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179973, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.168309] env[62875]: INFO nova.compute.manager [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Took 28.24 seconds to build instance. [ 2015.210988] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e84a66-22f8-6bba-6944-0f578c0dd12e, 'name': SearchDatastore_Task, 'duration_secs': 0.049521} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.212121] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-302597c2-a66d-4e3a-bf5b-2147b72bf815 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.216872] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2015.216872] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522a4680-a57f-3f78-3a16-1caeef454fc2" [ 2015.216872] env[62875]: _type = "Task" [ 2015.216872] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.224420] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522a4680-a57f-3f78-3a16-1caeef454fc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.479245] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2015.528914] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179973, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067631} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.531306] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2015.532491] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c193912-5f8b-4854-bb41-032cdf592a03 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.558717] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2015.559260] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a270710e-d87c-4007-a617-b21c5ba6af1a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.582681] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2015.582681] env[62875]: value = "task-2179974" [ 2015.582681] env[62875]: _type = "Task" [ 2015.582681] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.592991] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179974, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.670425] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6717925-b8f0-44cc-94c2-1d12802e035e tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "7969485a-ccd6-48e0-bdea-b8920af28843" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.686s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.728513] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522a4680-a57f-3f78-3a16-1caeef454fc2, 'name': SearchDatastore_Task, 'duration_secs': 0.009725} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.728818] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.729110] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c6de797f-03f7-4dca-9c6a-e7b840990be6/c6de797f-03f7-4dca-9c6a-e7b840990be6.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2015.730537] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1f2ba76-c49d-4d84-bb81-e756080e8b56 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.735691] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2015.735691] env[62875]: value = "task-2179975" [ 2015.735691] env[62875]: _type = "Task" [ 2015.735691] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.743783] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179975, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.935251] env[62875]: DEBUG nova.compute.manager [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Received event network-changed-6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2015.935251] env[62875]: DEBUG nova.compute.manager [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Refreshing instance network info cache due to event network-changed-6c87f73d-311a-4ed3-9d9f-5325a201e67e. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2015.935251] env[62875]: DEBUG oslo_concurrency.lockutils [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] Acquiring lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.935777] env[62875]: DEBUG oslo_concurrency.lockutils [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] Acquired lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.935923] env[62875]: DEBUG nova.network.neutron [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Refreshing network info cache for port 6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2015.984393] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2015.984707] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.010s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.985045] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.120s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.095468] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179974, 'name': ReconfigVM_Task, 'duration_secs': 0.255619} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.095468] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d/6f936641-750d-49ae-8beb-bca35305d10d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2016.097377] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef07f3b3-1dc0-4da3-b06f-9cc09b51ff5f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.104743] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2016.104743] env[62875]: value = "task-2179976" [ 2016.104743] env[62875]: _type = "Task" [ 2016.104743] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.115940] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179976, 'name': Rename_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.173518] env[62875]: DEBUG nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2016.249906] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179975, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.619195] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179976, 'name': Rename_Task, 'duration_secs': 0.248026} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.619504] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2016.619762] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-563ace5f-4cd3-4fa3-ad85-f3af56f2fa2e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.630046] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Waiting for the task: (returnval){ [ 2016.630046] env[62875]: value = "task-2179977" [ 2016.630046] env[62875]: _type = "Task" [ 2016.630046] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.637995] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179977, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.695651] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.706800] env[62875]: DEBUG nova.network.neutron [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updated VIF entry in instance network info cache for port 6c87f73d-311a-4ed3-9d9f-5325a201e67e. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2016.707040] env[62875]: DEBUG nova.network.neutron [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updating instance_info_cache with network_info: [{"id": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "address": "fa:16:3e:e9:29:5c", "network": {"id": "3028cdbc-4b41-4102-bd32-59fd93a60bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-125987334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9e631a043ef4ed9ae37c18a142afa38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87f73d-31", "ovs_interfaceid": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.746934] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179975, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538196} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.746934] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c6de797f-03f7-4dca-9c6a-e7b840990be6/c6de797f-03f7-4dca-9c6a-e7b840990be6.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2016.747365] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2016.747365] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-deced282-37dd-4520-9e42-bffd1d45dcde {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.753963] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2016.753963] env[62875]: value = "task-2179978" [ 2016.753963] env[62875]: _type = "Task" [ 2016.753963] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.766281] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179978, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.831514] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901e1e2b-f4df-4a90-bcec-4bf2c0753414 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.838621] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d44e5b1-30f2-463d-8bfa-16b046f4eaf2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.869780] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caba155d-bff3-4d16-9ee4-3ee0c7397c94 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.876895] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dde5acf-1ed3-4393-8683-468289117e57 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.889757] env[62875]: DEBUG nova.compute.provider_tree [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2017.140583] env[62875]: DEBUG oslo_vmware.api [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Task: {'id': task-2179977, 'name': PowerOnVM_Task, 'duration_secs': 0.475284} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.140866] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2017.141081] env[62875]: DEBUG nova.compute.manager [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2017.141848] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468c1024-1e8f-4d6c-94e5-cdfcc28d559f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.210596] env[62875]: DEBUG oslo_concurrency.lockutils [req-05dadecf-c10e-4350-8e29-248cfa812af7 req-578a4c5a-4a53-413a-ab37-e3f898454c47 service nova] Releasing lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.265235] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179978, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063256} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.265575] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2017.266434] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9d7092-5fdf-495b-a045-0beb53ad2c95 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.288580] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] c6de797f-03f7-4dca-9c6a-e7b840990be6/c6de797f-03f7-4dca-9c6a-e7b840990be6.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2017.288918] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94d66388-b177-405d-9b25-036819807642 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.309540] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2017.309540] env[62875]: value = "task-2179979" [ 2017.309540] env[62875]: _type = "Task" [ 2017.309540] 
env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.319917] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179979, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.393215] env[62875]: DEBUG nova.scheduler.client.report [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2017.482185] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.656739] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.820021] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179979, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.898137] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.913s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.898807] env[62875]: ERROR nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. 
[ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Traceback (most recent call last): [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self.driver.spawn(context, instance, image_meta, [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] vm_ref = self.build_virtual_machine(instance, [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 2017.898807] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] for vif in network_info: [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return self._sync_wrapper(fn, *args, **kwargs) [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self.wait() [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self[:] = self._gt.wait() [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return self._exit_event.wait() [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] result = hub.switch() [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 2017.899383] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return self.greenlet.switch() [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] result = function(*args, **kwargs) [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] return func(*args, **kwargs) [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] raise e [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] nwinfo = self.network_api.allocate_for_instance( [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] created_port_ids = self._update_ports_for_instance( [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] with excutils.save_and_reraise_exception(): [ 2017.899908] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] self.force_reraise() [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] raise self.value [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] updated_port = self._update_port( [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] _ensure_no_port_binding_failure(port) [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] raise exception.PortBindingFailed(port_id=port['id']) [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] nova.exception.PortBindingFailed: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. [ 2017.900382] env[62875]: ERROR nova.compute.manager [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] [ 2017.900774] env[62875]: DEBUG nova.compute.utils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2017.900774] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.139s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.903537] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Build of instance 77d57f64-9bab-46f1-87b4-62bac5c5d2bd was re-scheduled: Binding failed for port 484f1d78-c983-4016-9554-d37475fe8aa6, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 2017.903960] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 2017.904197] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Acquiring lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.904370] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Acquired lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.905301] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2017.987325] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.321188] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179979, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.426635] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "6f936641-750d-49ae-8beb-bca35305d10d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.427280] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "6f936641-750d-49ae-8beb-bca35305d10d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.427626] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "6f936641-750d-49ae-8beb-bca35305d10d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.429030] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "6f936641-750d-49ae-8beb-bca35305d10d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.429030] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "6f936641-750d-49ae-8beb-bca35305d10d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.432972] env[62875]: INFO nova.compute.manager [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Terminating instance [ 2018.435034] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2018.516870] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.687962] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97375e47-2dc2-43a3-854e-a2039e2a8de2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.695878] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39725064-2dc6-4eb6-a9c5-e39e7f2ea931 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.726257] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9cc331-eddd-40b2-8ea3-bba3d3647c17 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.733357] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ecfd61-5e39-44b0-9e84-e0f7e1c9053a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.745910] env[62875]: DEBUG nova.compute.provider_tree [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.820834] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179979, 'name': ReconfigVM_Task, 'duration_secs': 1.276204} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.821144] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Reconfigured VM instance instance-00000029 to attach disk [datastore2] c6de797f-03f7-4dca-9c6a-e7b840990be6/c6de797f-03f7-4dca-9c6a-e7b840990be6.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2018.821706] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d355fe70-b5bd-4c3b-b433-eb49d9723ff9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.827851] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2018.827851] env[62875]: value = "task-2179980" [ 2018.827851] env[62875]: _type = "Task" [ 2018.827851] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.835501] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179980, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.939858] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "refresh_cache-6f936641-750d-49ae-8beb-bca35305d10d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.940199] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquired lock "refresh_cache-6f936641-750d-49ae-8beb-bca35305d10d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.940450] env[62875]: DEBUG nova.network.neutron [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2019.020165] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Releasing lock "refresh_cache-77d57f64-9bab-46f1-87b4-62bac5c5d2bd" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.020495] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 2019.020696] env[62875]: DEBUG nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2019.020870] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2019.035804] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2019.249362] env[62875]: DEBUG nova.scheduler.client.report [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2019.338419] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179980, 'name': Rename_Task, 'duration_secs': 0.145941} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.338675] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2019.338895] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11b46dc6-8f43-49ff-b30d-323ee3076078 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.345252] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2019.345252] env[62875]: value = "task-2179981" [ 2019.345252] env[62875]: _type = "Task" [ 2019.345252] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.352168] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.462539] env[62875]: DEBUG nova.network.neutron [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2019.512105] env[62875]: DEBUG nova.network.neutron [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.538627] env[62875]: DEBUG nova.network.neutron [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.754602] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.854s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.755322] env[62875]: ERROR nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Traceback (most recent call last): [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self.driver.spawn(context, instance, image_meta, [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] vm_ref = self.build_virtual_machine(instance, [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 2019.755322] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] for vif in 
network_info: [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return self._sync_wrapper(fn, *args, **kwargs) [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self.wait() [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self[:] = self._gt.wait() [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return self._exit_event.wait() [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] result = hub.switch() [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2019.755821] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return self.greenlet.switch() [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] result = function(*args, **kwargs) [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] return func(*args, **kwargs) [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] raise e [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] nwinfo = self.network_api.allocate_for_instance( [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] created_port_ids = self._update_ports_for_instance( [ 
2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] with excutils.save_and_reraise_exception(): [ 2019.756266] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] self.force_reraise() [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] raise self.value [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] updated_port = self._update_port( [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] _ensure_no_port_binding_failure(port) [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] raise exception.PortBindingFailed(port_id=port['id']) [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] nova.exception.PortBindingFailed: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. [ 2019.756711] env[62875]: ERROR nova.compute.manager [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] [ 2019.757141] env[62875]: DEBUG nova.compute.utils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. 
{{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2019.757408] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.246s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.760499] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Build of instance 5f190f86-2faa-4b8e-821f-2113577541e4 was re-scheduled: Binding failed for port 4d91b54d-40d6-45e3-b2cc-7db118f143cc, please check neutron logs for more information. {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 2019.760938] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 2019.761186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.761375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquired lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.761568] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2019.855833] env[62875]: DEBUG oslo_vmware.api [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179981, 'name': PowerOnVM_Task, 'duration_secs': 0.510165} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.856132] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2019.856324] env[62875]: INFO nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Took 8.85 seconds to spawn the instance on the hypervisor. [ 2019.856506] env[62875]: DEBUG nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2019.857311] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1e3fb5-2521-4cb8-a609-272ac419bf30 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.015731] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Releasing lock "refresh_cache-6f936641-750d-49ae-8beb-bca35305d10d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.016200] env[62875]: DEBUG nova.compute.manager [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2020.016456] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2020.017350] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7275d8ac-0666-4cdb-91af-922a0864b32b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.024725] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2020.024960] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8a05d1e-6382-476f-86cd-8112f50c936a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.031291] env[62875]: DEBUG oslo_vmware.api [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2020.031291] env[62875]: value = "task-2179982" [ 2020.031291] env[62875]: _type = "Task" [ 2020.031291] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.038881] env[62875]: DEBUG oslo_vmware.api [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179982, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.041463] env[62875]: INFO nova.compute.manager [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] [instance: 77d57f64-9bab-46f1-87b4-62bac5c5d2bd] Took 1.02 seconds to deallocate network for instance. [ 2020.282742] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2020.360292] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.375705] env[62875]: INFO nova.compute.manager [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Took 31.52 seconds to build instance. 
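
[editor] The recurring "Waiting for the task ... progress is N% ... completed successfully" records above (task-2179975 through task-2179982) come from oslo.vmware's task-polling helpers (wait_for_task at oslo_vmware/api.py:397, _poll_task at :434/:444). A minimal sketch of that pattern, under the assumption of a generic vSphere setup; the host, credentials and managed-object reference below are placeholders, not values from this deployment:

    # Sketch of the oslo.vmware task pattern seen in this log. All
    # connection parameters and the VM moref are illustrative.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10,      # retries for transient API faults
        task_poll_interval=0.5)  # seconds between task polls

    # A vSphere "*_Task" method returns a Task moref immediately ...
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... and wait_for_task() polls it, logging "progress is N%" until
    # it reaches 'success' (returning task info) or raises on 'error'.
    task_info = session.wait_for_task(task)
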
[ 2020.536553] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30941088-87e4-4b74-947c-0410993edd25 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.547260] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7eb2abc-aa68-4d21-abe3-c935bd519b63 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.552205] env[62875]: DEBUG oslo_vmware.api [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179982, 'name': PowerOffVM_Task, 'duration_secs': 0.199291} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.552672] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2020.552909] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2020.553447] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7722ace-362c-481c-b752-109af8572228 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.581556] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c634d0a6-9de1-4463-af39-509ffe6beba0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.583866] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2020.584149] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2020.584241] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Deleting the datastore file [datastore2] 6f936641-750d-49ae-8beb-bca35305d10d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2020.585058] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e22c7cd-8924-4371-a937-5d458da1b195 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2020.591773] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75799ee3-fb81-45f8-abfe-f136eddee27e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.596388] env[62875]: DEBUG oslo_vmware.api [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for the task: (returnval){ [ 2020.596388] env[62875]: value = "task-2179984" [ 2020.596388] env[62875]: _type = "Task" [ 2020.596388] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.607204] env[62875]: DEBUG nova.compute.provider_tree [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.612561] env[62875]: DEBUG oslo_vmware.api [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179984, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.800094] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "c6de797f-03f7-4dca-9c6a-e7b840990be6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.863432] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Releasing lock "refresh_cache-5f190f86-2faa-4b8e-821f-2113577541e4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.863830] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 2020.864028] env[62875]: DEBUG nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2020.864203] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2020.878439] env[62875]: DEBUG oslo_concurrency.lockutils [None req-06033a40-8a9f-40d9-aef1-e17ee7ae5360 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.122s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.879661] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.080s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.879878] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "c6de797f-03f7-4dca-9c6a-e7b840990be6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.880095] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.880294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.882281] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2020.883524] env[62875]: INFO nova.compute.manager [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Terminating instance [ 2021.075901] env[62875]: INFO nova.scheduler.client.report [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Deleted allocations for instance 77d57f64-9bab-46f1-87b4-62bac5c5d2bd [ 2021.108319] env[62875]: DEBUG oslo_vmware.api [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Task: {'id': task-2179984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11198} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.108319] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2021.108319] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2021.108319] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2021.108319] env[62875]: INFO nova.compute.manager [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Took 1.09 seconds to destroy the instance on the hypervisor. [ 2021.108851] env[62875]: DEBUG oslo.service.loopingcall [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2021.108851] env[62875]: DEBUG nova.compute.manager [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2021.108851] env[62875]: DEBUG nova.network.neutron [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2021.110680] env[62875]: DEBUG nova.scheduler.client.report [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2021.135541] env[62875]: DEBUG nova.network.neutron [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2021.385494] env[62875]: DEBUG nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2021.388625] env[62875]: DEBUG nova.network.neutron [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.390647] env[62875]: DEBUG nova.compute.manager [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2021.390647] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2021.390813] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6146f13a-42b6-4bea-a44b-501fa630f40c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.398990] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2021.399269] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ccab129-719b-4c77-8c18-a2c56171fb44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.406889] env[62875]: DEBUG oslo_vmware.api [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2021.406889] env[62875]: value = "task-2179985" [ 2021.406889] env[62875]: _type = "Task" [ 2021.406889] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.415026] env[62875]: DEBUG oslo_vmware.api [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.583908] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fc8b67-27d1-43cc-af8c-449571f1093f tempest-ServersNegativeTestJSON-355935217 tempest-ServersNegativeTestJSON-355935217-project-member] Lock "77d57f64-9bab-46f1-87b4-62bac5c5d2bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.270s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.615234] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.858s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.615892] env[62875]: ERROR nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. 
[ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Traceback (most recent call last): [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self.driver.spawn(context, instance, image_meta, [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] vm_ref = self.build_virtual_machine(instance, [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] vif_infos = vmwarevif.get_vif_info(self._session, [ 2021.615892] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] for vif in network_info: [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return self._sync_wrapper(fn, *args, **kwargs) [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self.wait() [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self[:] = self._gt.wait() [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return self._exit_event.wait() [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] result = hub.switch() [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 2021.616306] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return self.greenlet.switch() [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] result = function(*args, **kwargs) [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] return func(*args, **kwargs) [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] raise e [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] nwinfo = self.network_api.allocate_for_instance( [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] created_port_ids = self._update_ports_for_instance( [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] with excutils.save_and_reraise_exception(): [ 2021.617303] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] self.force_reraise() [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] raise self.value [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] updated_port = self._update_port( [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] _ensure_no_port_binding_failure(port) [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] raise exception.PortBindingFailed(port_id=port['id']) [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] nova.exception.PortBindingFailed: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. [ 2021.617705] env[62875]: ERROR nova.compute.manager [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] [ 2021.618066] env[62875]: DEBUG nova.compute.utils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2021.618066] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.253s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.620106] env[62875]: INFO nova.compute.claims [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2021.623260] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Build of instance 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738 was re-scheduled: Binding failed for port 2cdce133-68ef-43b8-9b99-27031c91d8e3, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 2021.624053] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 2021.624053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquiring lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.624260] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Acquired lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.624260] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2021.637632] env[62875]: DEBUG nova.network.neutron [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.894994] env[62875]: INFO nova.compute.manager [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 5f190f86-2faa-4b8e-821f-2113577541e4] Took 1.03 seconds to deallocate network for instance. [ 2021.910815] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.915566] env[62875]: DEBUG oslo_vmware.api [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179985, 'name': PowerOffVM_Task, 'duration_secs': 0.155582} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.915831] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2021.916011] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2021.916259] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6355967a-381c-4eb0-8fca-2a695efe3720 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.992539] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2021.992787] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2021.992974] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Deleting the datastore file [datastore2] c6de797f-03f7-4dca-9c6a-e7b840990be6 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2021.993247] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-857549a9-7de1-4af9-9656-036b291c6717 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.999509] env[62875]: DEBUG oslo_vmware.api [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for the task: (returnval){ [ 2021.999509] env[62875]: value = "task-2179987" [ 2021.999509] env[62875]: _type = "Task" [ 2021.999509] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.007312] env[62875]: DEBUG oslo_vmware.api [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.087683] env[62875]: DEBUG nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2022.142079] env[62875]: INFO nova.compute.manager [-] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Took 1.03 seconds to deallocate network for instance. [ 2022.152040] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2022.281510] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.509578] env[62875]: DEBUG oslo_vmware.api [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Task: {'id': task-2179987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130156} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.512157] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2022.512157] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2022.512157] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2022.512157] env[62875]: INFO nova.compute.manager [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2022.512157] env[62875]: DEBUG oslo.service.loopingcall [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2022.512361] env[62875]: DEBUG nova.compute.manager [-] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2022.512361] env[62875]: DEBUG nova.network.neutron [-] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2022.615925] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.646937] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.783064] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Releasing lock "refresh_cache-1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.783064] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 2022.783064] env[62875]: DEBUG nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2022.783064] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2022.804858] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2022.902110] env[62875]: DEBUG nova.compute.manager [req-42c40552-63c2-4a41-97f8-4f798f274bb0 req-427b689d-d97f-4512-b342-0e6ace98368e service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Received event network-vif-deleted-08671000-5c6e-4076-b739-0d436002843b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2022.903285] env[62875]: INFO nova.compute.manager [req-42c40552-63c2-4a41-97f8-4f798f274bb0 req-427b689d-d97f-4512-b342-0e6ace98368e service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Neutron deleted interface 08671000-5c6e-4076-b739-0d436002843b; detaching it from the instance and deleting it from the info cache [ 2022.903285] env[62875]: DEBUG nova.network.neutron [req-42c40552-63c2-4a41-97f8-4f798f274bb0 req-427b689d-d97f-4512-b342-0e6ace98368e service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.923630] env[62875]: INFO nova.scheduler.client.report [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Deleted allocations for instance 5f190f86-2faa-4b8e-821f-2113577541e4 [ 2022.984828] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c6e5b3-8300-4b84-9969-36dbf81499b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.993407] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7377f9a-6d1f-4d83-a532-dfd7bda63350 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.026348] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed589f5-2d6b-4ed4-8cca-9e16a2f63169 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.033881] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9755daaa-6eab-42b6-8b76-2b79a2ecdb28 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.051163] env[62875]: DEBUG nova.compute.provider_tree [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2023.308174] env[62875]: DEBUG nova.network.neutron [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.383276] env[62875]: DEBUG nova.network.neutron [-] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
2023.405306] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d5e93ff-4ccb-432f-8465-45f89ec77397 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.420514] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f314734-d4bb-4880-9292-30100f133a5e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.435073] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e0dd8b57-0e04-489d-97ee-535cd0ec232c tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "5f190f86-2faa-4b8e-821f-2113577541e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 142.488s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.450129] env[62875]: DEBUG nova.compute.manager [req-42c40552-63c2-4a41-97f8-4f798f274bb0 req-427b689d-d97f-4512-b342-0e6ace98368e service nova] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Detach interface failed, port_id=08671000-5c6e-4076-b739-0d436002843b, reason: Instance c6de797f-03f7-4dca-9c6a-e7b840990be6 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2023.554420] env[62875]: DEBUG nova.scheduler.client.report [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2023.810750] env[62875]: INFO nova.compute.manager [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] [instance: 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738] Took 1.03 seconds to deallocate network for instance. [ 2023.889952] env[62875]: INFO nova.compute.manager [-] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Took 1.38 seconds to deallocate network for instance. [ 2023.937893] env[62875]: DEBUG nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2024.060942] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.060942] env[62875]: DEBUG nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2024.063315] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.394s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.064716] env[62875]: INFO nova.compute.claims [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2024.397660] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.460601] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.572904] env[62875]: DEBUG nova.compute.utils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2024.574832] env[62875]: DEBUG nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2024.575013] env[62875]: DEBUG nova.network.neutron [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2024.614371] env[62875]: DEBUG nova.policy [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e24ac140d88e4488a61e014a59008a0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c8d2c567cad46ce8f6bdf4541da4ea5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2024.845388] env[62875]: INFO nova.scheduler.client.report [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Deleted allocations for instance 1874ad5f-2ae3-43ee-afcf-8f36dd2aa738 [ 2024.926407] env[62875]: DEBUG nova.network.neutron [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Successfully created port: 7e5216de-a51f-4db3-b650-8adf07f7ed9c {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2025.079697] env[62875]: DEBUG nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2025.315702] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17656bb-96b6-4512-9a60-33d45209e8c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.323624] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cb7683-7dde-4b37-990c-bff2b5fd8011 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.352844] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aed7d8c-9607-417e-84b9-ad827fc89cd4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.359670] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4d27a5-b0b8-4c46-80a1-b8351a7adbee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.364573] env[62875]: DEBUG oslo_concurrency.lockutils [None req-84cb7fce-c889-4e8c-a65f-88ac37548b65 tempest-ServerRescueNegativeTestJSON-1508498258 tempest-ServerRescueNegativeTestJSON-1508498258-project-member] Lock "1874ad5f-2ae3-43ee-afcf-8f36dd2aa738" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.629s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.374683] env[62875]: DEBUG nova.compute.provider_tree [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2025.706752] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2025.706752] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2025.869889] env[62875]: DEBUG nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2025.877499] env[62875]: DEBUG nova.scheduler.client.report [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2026.091593] env[62875]: DEBUG nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2026.115725] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2026.115983] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2026.116207] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2026.116436] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2026.117025] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
2026.117025] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2026.117175] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2026.117372] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2026.117614] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2026.117829] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2026.118073] env[62875]: DEBUG nova.virt.hardware [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2026.119078] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f972728-f280-41d3-a14e-b8dbb74720b5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.126866] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5a24dd-c134-49e7-bbf6-10905d138266 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.341242] env[62875]: DEBUG nova.compute.manager [req-6de38a71-2b99-4fd1-833f-39669aeb37ba req-d83fd504-bea5-4263-9c90-9b091ce9aa58 service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Received event network-vif-plugged-7e5216de-a51f-4db3-b650-8adf07f7ed9c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2026.341431] env[62875]: DEBUG oslo_concurrency.lockutils [req-6de38a71-2b99-4fd1-833f-39669aeb37ba req-d83fd504-bea5-4263-9c90-9b091ce9aa58 service nova] Acquiring lock "acc78084-21e8-456c-a573-fc5e931147c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.342643] env[62875]: DEBUG oslo_concurrency.lockutils [req-6de38a71-2b99-4fd1-833f-39669aeb37ba 
req-d83fd504-bea5-4263-9c90-9b091ce9aa58 service nova] Lock "acc78084-21e8-456c-a573-fc5e931147c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.343042] env[62875]: DEBUG oslo_concurrency.lockutils [req-6de38a71-2b99-4fd1-833f-39669aeb37ba req-d83fd504-bea5-4263-9c90-9b091ce9aa58 service nova] Lock "acc78084-21e8-456c-a573-fc5e931147c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.343123] env[62875]: DEBUG nova.compute.manager [req-6de38a71-2b99-4fd1-833f-39669aeb37ba req-d83fd504-bea5-4263-9c90-9b091ce9aa58 service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] No waiting events found dispatching network-vif-plugged-7e5216de-a51f-4db3-b650-8adf07f7ed9c {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2026.343258] env[62875]: WARNING nova.compute.manager [req-6de38a71-2b99-4fd1-833f-39669aeb37ba req-d83fd504-bea5-4263-9c90-9b091ce9aa58 service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Received unexpected event network-vif-plugged-7e5216de-a51f-4db3-b650-8adf07f7ed9c for instance with vm_state building and task_state spawning. [ 2026.382542] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.383111] env[62875]: DEBUG nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2026.388378] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.761s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.399088] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.489815] env[62875]: DEBUG nova.network.neutron [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Successfully updated port: 7e5216de-a51f-4db3-b650-8adf07f7ed9c {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2026.706682] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.706992] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.707098] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2026.897285] env[62875]: DEBUG nova.compute.utils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2026.899745] env[62875]: DEBUG nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2026.899745] env[62875]: DEBUG nova.network.neutron [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2026.936822] env[62875]: DEBUG nova.policy [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4d9fdcabb04463ab77f755534adc4c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9f5dffc6b134843ae4eb7dd00874521', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2026.997428] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "refresh_cache-acc78084-21e8-456c-a573-fc5e931147c6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2026.997580] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquired lock "refresh_cache-acc78084-21e8-456c-a573-fc5e931147c6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.997730] env[62875]: DEBUG nova.network.neutron [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2027.172105] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa9c383-49a4-4c7f-875e-9629757b2fab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.184608] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe1e341-d691-4b39-b398-ead487d98283 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.223456] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb83ca40-913c-4d6b-966f-b4aba4c9f665 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.231792] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba73121-f48f-45a9-abfe-9ad727772fb0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.245795] env[62875]: DEBUG nova.compute.provider_tree [None req-a7635551-60d5-4572-a6f4-00ca94e40275 
tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.257210] env[62875]: DEBUG nova.network.neutron [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Successfully created port: ba667cf6-8e2a-48bd-8e30-23d4df08e82b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2027.402509] env[62875]: DEBUG nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2027.530428] env[62875]: DEBUG nova.network.neutron [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2027.738107] env[62875]: DEBUG nova.network.neutron [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Updating instance_info_cache with network_info: [{"id": "7e5216de-a51f-4db3-b650-8adf07f7ed9c", "address": "fa:16:3e:43:a0:6c", "network": {"id": "fe3c4c14-27f2-4e41-839a-f97227bfffea", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1927554980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c8d2c567cad46ce8f6bdf4541da4ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e5216de-a5", "ovs_interfaceid": "7e5216de-a51f-4db3-b650-8adf07f7ed9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.748815] env[62875]: DEBUG nova.scheduler.client.report [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2028.241741] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Releasing lock "refresh_cache-acc78084-21e8-456c-a573-fc5e931147c6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.242096] env[62875]: DEBUG nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Instance network_info: |[{"id": "7e5216de-a51f-4db3-b650-8adf07f7ed9c", "address": "fa:16:3e:43:a0:6c", "network": {"id": "fe3c4c14-27f2-4e41-839a-f97227bfffea", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1927554980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c8d2c567cad46ce8f6bdf4541da4ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e5216de-a5", "ovs_interfaceid": "7e5216de-a51f-4db3-b650-8adf07f7ed9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2028.242553] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:a0:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e5216de-a51f-4db3-b650-8adf07f7ed9c', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2028.250301] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Creating folder: Project (2c8d2c567cad46ce8f6bdf4541da4ea5). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2028.250718] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f92d7ea9-b16b-4990-aded-6d03703449f0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.254789] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.866s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.255415] env[62875]: ERROR nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Traceback (most recent call last): [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self.driver.spawn(context, instance, image_meta, [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] vm_ref = self.build_virtual_machine(instance, [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] vif_infos = vmwarevif.get_vif_info(self._session, [ 2028.255415] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] for vif in network_info: [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] return self._sync_wrapper(fn, *args, **kwargs) [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 
41ec8810-3f17-4f59-9828-a4a2e873eab4] self.wait() [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self[:] = self._gt.wait() [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] return self._exit_event.wait() [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] current.throw(*self._exc) [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2028.255809] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] result = function(*args, **kwargs) [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] return func(*args, **kwargs) [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] raise e [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] nwinfo = self.network_api.allocate_for_instance( [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] created_port_ids = self._update_ports_for_instance( [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] with excutils.save_and_reraise_exception(): [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] self.force_reraise() [ 2028.256297] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2028.256858] env[62875]: ERROR 
nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] raise self.value [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] updated_port = self._update_port( [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] _ensure_no_port_binding_failure(port) [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] raise exception.PortBindingFailed(port_id=port['id']) [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] nova.exception.PortBindingFailed: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. [ 2028.256858] env[62875]: ERROR nova.compute.manager [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] [ 2028.256858] env[62875]: DEBUG nova.compute.utils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2028.257240] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.013s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.263020] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Build of instance 41ec8810-3f17-4f59-9828-a4a2e873eab4 was re-scheduled: Binding failed for port 14731902-25d4-4bb5-91c1-caece4a9215f, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 2028.263020] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 2028.263020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.263020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.263020] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2028.263521] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Created folder: Project (2c8d2c567cad46ce8f6bdf4541da4ea5) in parent group-v444854. [ 2028.263739] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Creating folder: Instances. Parent ref: group-v444873. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2028.263847] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ab9d6f9-cfc4-4edd-bf2f-295203bfeba7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.278008] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Created folder: Instances in parent group-v444873. [ 2028.278322] env[62875]: DEBUG oslo.service.loopingcall [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.278518] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2028.278740] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3af6c32b-bbaa-4c97-b62d-607cba6b9452 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.303547] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2028.303547] env[62875]: value = "task-2179990" [ 2028.303547] env[62875]: _type = "Task" [ 2028.303547] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.313200] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179990, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.414670] env[62875]: DEBUG nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2028.420150] env[62875]: DEBUG nova.compute.manager [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Received event network-changed-7e5216de-a51f-4db3-b650-8adf07f7ed9c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2028.420338] env[62875]: DEBUG nova.compute.manager [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Refreshing instance network info cache due to event network-changed-7e5216de-a51f-4db3-b650-8adf07f7ed9c. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2028.420579] env[62875]: DEBUG oslo_concurrency.lockutils [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] Acquiring lock "refresh_cache-acc78084-21e8-456c-a573-fc5e931147c6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.421169] env[62875]: DEBUG oslo_concurrency.lockutils [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] Acquired lock "refresh_cache-acc78084-21e8-456c-a573-fc5e931147c6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.421357] env[62875]: DEBUG nova.network.neutron [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Refreshing network info cache for port 7e5216de-a51f-4db3-b650-8adf07f7ed9c {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2028.459490] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2028.459895] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2028.460088] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2028.460299] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2028.460457] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2028.460642] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 
tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2028.460920] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2028.461118] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2028.461302] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2028.461661] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2028.461876] env[62875]: DEBUG nova.virt.hardware [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2028.465229] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b206e82-f048-4fde-b3cf-d12640007981 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.474131] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b67bc41-82b3-402e-b7c8-c1b92d553809 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.785595] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2028.813489] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179990, 'name': CreateVM_Task, 'duration_secs': 0.321465} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.815728] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2028.816567] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.816740] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.817069] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2028.817324] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-741d8a06-fb0c-4f1c-b5f9-be56efe05bc7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.821564] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2028.821564] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52514318-b3f4-bef4-0149-ab6a3fb12c17" [ 2028.821564] env[62875]: _type = "Task" [ 2028.821564] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.831153] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52514318-b3f4-bef4-0149-ab6a3fb12c17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.904698] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2029.054844] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1846f4f-9f4f-463b-8caf-40dc39a3c434 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.064337] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4cdac4-bfb7-4f86-80a7-5f0e15531a05 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.098627] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582c5c9f-9cb0-4076-9e59-1ef6dbd96f83 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.101623] env[62875]: DEBUG nova.network.neutron [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Successfully updated port: ba667cf6-8e2a-48bd-8e30-23d4df08e82b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2029.108205] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0501a8f9-7a1f-4525-ad8c-c5f8fcad35c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.123027] env[62875]: DEBUG nova.compute.provider_tree [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2029.168570] env[62875]: DEBUG nova.network.neutron [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Updated VIF entry in instance network info cache for port 7e5216de-a51f-4db3-b650-8adf07f7ed9c. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2029.168968] env[62875]: DEBUG nova.network.neutron [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Updating instance_info_cache with network_info: [{"id": "7e5216de-a51f-4db3-b650-8adf07f7ed9c", "address": "fa:16:3e:43:a0:6c", "network": {"id": "fe3c4c14-27f2-4e41-839a-f97227bfffea", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1927554980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c8d2c567cad46ce8f6bdf4541da4ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e5216de-a5", "ovs_interfaceid": "7e5216de-a51f-4db3-b650-8adf07f7ed9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2029.332032] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52514318-b3f4-bef4-0149-ab6a3fb12c17, 'name': SearchDatastore_Task, 'duration_secs': 0.012577} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.332355] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.332599] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2029.332840] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.332991] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.333184] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2029.333442] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71300c07-831a-42a3-bb51-c05996e27801 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.341331] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2029.341506] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2029.342217] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-327c68cf-b05e-49fb-90bb-c1c300eb49e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.347323] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2029.347323] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eba7f9-e726-ad83-551d-63fb0a46de3a" [ 2029.347323] env[62875]: _type = "Task" [ 2029.347323] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.354659] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eba7f9-e726-ad83-551d-63fb0a46de3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.407465] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "refresh_cache-41ec8810-3f17-4f59-9828-a4a2e873eab4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.407736] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 2029.407925] env[62875]: DEBUG nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2029.408119] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2029.428469] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2029.604237] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "refresh_cache-a64253fe-4ba9-4686-810b-a26a4c29631b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.604402] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquired lock "refresh_cache-a64253fe-4ba9-4686-810b-a26a4c29631b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.604556] env[62875]: DEBUG nova.network.neutron [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2029.627097] env[62875]: DEBUG nova.scheduler.client.report [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2029.671676] env[62875]: DEBUG oslo_concurrency.lockutils [req-ac9da7f5-ddda-4db4-a3c3-3226a7a5c6f2 req-0e43a2b6-29e4-4737-acbf-2f8e90ed9bce service nova] Releasing lock "refresh_cache-acc78084-21e8-456c-a573-fc5e931147c6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.857914] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eba7f9-e726-ad83-551d-63fb0a46de3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010505} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.858761] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-889d2b11-01ee-47b7-a284-5147f70c8095 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.863765] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2029.863765] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5299177f-ae67-778b-fd38-6472d296dd16" [ 2029.863765] env[62875]: _type = "Task" [ 2029.863765] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.871332] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5299177f-ae67-778b-fd38-6472d296dd16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.931631] env[62875]: DEBUG nova.network.neutron [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.133033] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.875s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.133252] env[62875]: ERROR nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. 
[ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Traceback (most recent call last): [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self.driver.spawn(context, instance, image_meta, [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] vm_ref = self.build_virtual_machine(instance, [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] vif_infos = vmwarevif.get_vif_info(self._session, [ 2030.133252] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] for vif in network_info: [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return self._sync_wrapper(fn, *args, **kwargs) [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self.wait() [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self[:] = self._gt.wait() [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return self._exit_event.wait() [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] result = hub.switch() [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 2030.133592] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return self.greenlet.switch() [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] result = function(*args, **kwargs) [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] return func(*args, **kwargs) [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] raise e [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] nwinfo = self.network_api.allocate_for_instance( [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] created_port_ids = self._update_ports_for_instance( [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] with excutils.save_and_reraise_exception(): [ 2030.133977] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] self.force_reraise() [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] raise self.value [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] updated_port = self._update_port( [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] _ensure_no_port_binding_failure(port) [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] raise exception.PortBindingFailed(port_id=port['id']) [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] nova.exception.PortBindingFailed: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. [ 2030.134473] env[62875]: ERROR nova.compute.manager [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] [ 2030.134867] env[62875]: DEBUG nova.compute.utils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2030.135680] env[62875]: DEBUG nova.network.neutron [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2030.137532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.332s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.137708] env[62875]: DEBUG nova.objects.instance [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62875) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2030.140293] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Build of instance 5cf1f620-d0da-4e81-8d1f-e881c47dcad1 was re-scheduled: Binding failed for port 82cc5921-b44c-4ec7-8717-1f8dd3435cca, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 2030.140720] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 2030.140943] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Acquiring lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2030.141103] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Acquired lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.141281] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2030.273163] env[62875]: DEBUG nova.network.neutron [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Updating instance_info_cache with network_info: [{"id": "ba667cf6-8e2a-48bd-8e30-23d4df08e82b", "address": "fa:16:3e:c1:79:aa", "network": {"id": "d817c328-4a99-4f4b-b2d1-595b06da37b0", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1952809160-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9f5dffc6b134843ae4eb7dd00874521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f856fca-9fb5-41ea-a057-ac4193bd323d", "external-id": "nsx-vlan-transportzone-148", "segmentation_id": 148, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba667cf6-8e", "ovs_interfaceid": "ba667cf6-8e2a-48bd-8e30-23d4df08e82b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.373458] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5299177f-ae67-778b-fd38-6472d296dd16, 'name': SearchDatastore_Task, 'duration_secs': 0.010357} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.373710] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.373959] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] acc78084-21e8-456c-a573-fc5e931147c6/acc78084-21e8-456c-a573-fc5e931147c6.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2030.374217] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50ef2c11-ec1f-492d-a9ea-0452f9bf02f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.380516] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2030.380516] env[62875]: value = "task-2179991" [ 2030.380516] env[62875]: _type = "Task" [ 2030.380516] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.387524] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179991, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.434972] env[62875]: INFO nova.compute.manager [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 41ec8810-3f17-4f59-9828-a4a2e873eab4] Took 1.03 seconds to deallocate network for instance. 
[ 2030.445398] env[62875]: DEBUG nova.compute.manager [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Received event network-vif-plugged-ba667cf6-8e2a-48bd-8e30-23d4df08e82b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2030.446035] env[62875]: DEBUG oslo_concurrency.lockutils [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] Acquiring lock "a64253fe-4ba9-4686-810b-a26a4c29631b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.446035] env[62875]: DEBUG oslo_concurrency.lockutils [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.446035] env[62875]: DEBUG oslo_concurrency.lockutils [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.446295] env[62875]: DEBUG nova.compute.manager [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] No waiting events found dispatching network-vif-plugged-ba667cf6-8e2a-48bd-8e30-23d4df08e82b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2030.446473] env[62875]: WARNING nova.compute.manager [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Received unexpected event network-vif-plugged-ba667cf6-8e2a-48bd-8e30-23d4df08e82b for instance with vm_state building and task_state spawning. [ 2030.446877] env[62875]: DEBUG nova.compute.manager [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Received event network-changed-ba667cf6-8e2a-48bd-8e30-23d4df08e82b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2030.447092] env[62875]: DEBUG nova.compute.manager [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Refreshing instance network info cache due to event network-changed-ba667cf6-8e2a-48bd-8e30-23d4df08e82b. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2030.447238] env[62875]: DEBUG oslo_concurrency.lockutils [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] Acquiring lock "refresh_cache-a64253fe-4ba9-4686-810b-a26a4c29631b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2030.677147] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2030.707394] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.775696] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Releasing lock "refresh_cache-a64253fe-4ba9-4686-810b-a26a4c29631b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.775914] env[62875]: DEBUG nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Instance network_info: |[{"id": "ba667cf6-8e2a-48bd-8e30-23d4df08e82b", "address": "fa:16:3e:c1:79:aa", "network": {"id": "d817c328-4a99-4f4b-b2d1-595b06da37b0", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1952809160-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9f5dffc6b134843ae4eb7dd00874521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f856fca-9fb5-41ea-a057-ac4193bd323d", "external-id": "nsx-vlan-transportzone-148", "segmentation_id": 148, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba667cf6-8e", "ovs_interfaceid": "ba667cf6-8e2a-48bd-8e30-23d4df08e82b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2030.776248] env[62875]: DEBUG oslo_concurrency.lockutils [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] Acquired lock "refresh_cache-a64253fe-4ba9-4686-810b-a26a4c29631b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.776428] env[62875]: DEBUG nova.network.neutron [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: 
a64253fe-4ba9-4686-810b-a26a4c29631b] Refreshing network info cache for port ba667cf6-8e2a-48bd-8e30-23d4df08e82b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2030.778370] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:79:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f856fca-9fb5-41ea-a057-ac4193bd323d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba667cf6-8e2a-48bd-8e30-23d4df08e82b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2030.785401] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Creating folder: Project (c9f5dffc6b134843ae4eb7dd00874521). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2030.785997] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46493e8f-0951-43b6-8474-0210813a1971 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.801501] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Created folder: Project (c9f5dffc6b134843ae4eb7dd00874521) in parent group-v444854. [ 2030.802028] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Creating folder: Instances. Parent ref: group-v444876. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2030.802028] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31f32676-c2f1-47f2-b250-17ffcb2026f9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.813967] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Created folder: Instances in parent group-v444876. [ 2030.814266] env[62875]: DEBUG oslo.service.loopingcall [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2030.814928] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2030.814928] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd5547ae-2483-4c12-a9f1-bc4ef58ecdcd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.830484] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.838970] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2030.838970] env[62875]: value = "task-2179994" [ 2030.838970] env[62875]: _type = "Task" [ 2030.838970] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.845771] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179994, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.889186] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179991, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456851} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.889551] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] acc78084-21e8-456c-a573-fc5e931147c6/acc78084-21e8-456c-a573-fc5e931147c6.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2030.889769] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2030.890062] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71241277-361c-4e03-838a-c1523e0fcc38 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.896978] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2030.896978] env[62875]: value = "task-2179995" [ 2030.896978] env[62875]: _type = "Task" [ 2030.896978] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.904434] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179995, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.149533] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5e3aa0f8-5cd4-45df-88dc-294e9cb31e6f tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.150667] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.174s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.332864] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Releasing lock "refresh_cache-5cf1f620-d0da-4e81-8d1f-e881c47dcad1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2031.333113] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 2031.333301] env[62875]: DEBUG nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2031.333473] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2031.347104] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2179994, 'name': CreateVM_Task, 'duration_secs': 0.3566} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.347300] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2031.347939] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.348115] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.348432] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2031.348665] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9058225-1496-4636-8db1-8a85df0abd82 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.350797] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2031.355484] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2031.355484] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c95043-5cb4-b8c1-8f48-9c87e7cafbf5" [ 2031.355484] env[62875]: _type = "Task" [ 2031.355484] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.365105] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c95043-5cb4-b8c1-8f48-9c87e7cafbf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.406617] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179995, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065187} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.406898] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2031.407646] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7f8b72-8729-49e8-ab59-b445eb5652bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.428986] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] acc78084-21e8-456c-a573-fc5e931147c6/acc78084-21e8-456c-a573-fc5e931147c6.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2031.431672] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63e5b18f-b7b2-465d-9751-b51005cf8439 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.453355] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2031.453355] env[62875]: value = "task-2179996" [ 2031.453355] env[62875]: _type = "Task" [ 2031.453355] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.461148] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179996, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.476224] env[62875]: INFO nova.scheduler.client.report [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted allocations for instance 41ec8810-3f17-4f59-9828-a4a2e873eab4 [ 2031.583269] env[62875]: DEBUG nova.network.neutron [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Updated VIF entry in instance network info cache for port ba667cf6-8e2a-48bd-8e30-23d4df08e82b. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2031.584030] env[62875]: DEBUG nova.network.neutron [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Updating instance_info_cache with network_info: [{"id": "ba667cf6-8e2a-48bd-8e30-23d4df08e82b", "address": "fa:16:3e:c1:79:aa", "network": {"id": "d817c328-4a99-4f4b-b2d1-595b06da37b0", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1952809160-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9f5dffc6b134843ae4eb7dd00874521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f856fca-9fb5-41ea-a057-ac4193bd323d", "external-id": "nsx-vlan-transportzone-148", "segmentation_id": 148, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba667cf6-8e", "ovs_interfaceid": "ba667cf6-8e2a-48bd-8e30-23d4df08e82b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.706906] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2031.853439] env[62875]: DEBUG nova.network.neutron [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.867349] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c95043-5cb4-b8c1-8f48-9c87e7cafbf5, 'name': SearchDatastore_Task, 'duration_secs': 0.011367} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.867644] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2031.867870] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2031.868109] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.868257] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.868437] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2031.868682] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-687c81dc-e26a-44cb-8652-18f5202b2ad2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.879559] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2031.879741] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2031.880482] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-839a6f30-c2fc-4438-8679-561bf55c7c46 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.888804] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2031.888804] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5248fb82-6926-b5ca-3e2f-94513e99a4a1" [ 2031.888804] env[62875]: _type = "Task" [ 2031.888804] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.895588] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5248fb82-6926-b5ca-3e2f-94513e99a4a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.896983] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955762d0-269f-43f9-99c0-a401fdddb592 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.903417] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52914b8f-c14b-46a6-bffc-14ff668b909c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.935024] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9ec905-9c86-4184-ac69-d31cf86036fb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.941892] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556f6393-effb-4426-bf19-0f1270ac6739 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.955047] env[62875]: DEBUG nova.compute.provider_tree [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2031.963667] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179996, 'name': ReconfigVM_Task, 'duration_secs': 0.318098} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.964512] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Reconfigured VM instance instance-0000002a to attach disk [datastore2] acc78084-21e8-456c-a573-fc5e931147c6/acc78084-21e8-456c-a573-fc5e931147c6.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2031.965126] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ea976db-1cef-4a7a-8215-26a6dd43f37c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.971350] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2031.971350] env[62875]: value = "task-2179997" [ 2031.971350] env[62875]: _type = "Task" [ 2031.971350] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.979684] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179997, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.986435] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a7635551-60d5-4572-a6f4-00ca94e40275 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "41ec8810-3f17-4f59-9828-a4a2e873eab4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.177s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.086661] env[62875]: DEBUG oslo_concurrency.lockutils [req-3b95c2b1-b7db-4e7f-becf-ba22eae3346f req-a5d0d488-96ec-4c9e-b427-75c120642932 service nova] Releasing lock "refresh_cache-a64253fe-4ba9-4686-810b-a26a4c29631b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.356232] env[62875]: INFO nova.compute.manager [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] [instance: 5cf1f620-d0da-4e81-8d1f-e881c47dcad1] Took 1.02 seconds to deallocate network for instance. [ 2032.399253] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5248fb82-6926-b5ca-3e2f-94513e99a4a1, 'name': SearchDatastore_Task, 'duration_secs': 0.010447} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.400056] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b8942a2-dc64-4659-aa60-f90bea69a8c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.405357] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2032.405357] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5200d748-ffed-cbf0-8652-41e5941fac03" [ 2032.405357] env[62875]: _type = "Task" [ 2032.405357] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.412825] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5200d748-ffed-cbf0-8652-41e5941fac03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.460361] env[62875]: DEBUG nova.scheduler.client.report [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2032.482274] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179997, 'name': Rename_Task, 'duration_secs': 0.148595} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.482538] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2032.482765] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4fb3c79-0442-4ffd-99a0-6ce399fd083a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.488486] env[62875]: DEBUG nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2032.491861] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2032.491861] env[62875]: value = "task-2179998" [ 2032.491861] env[62875]: _type = "Task" [ 2032.491861] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.504144] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179998, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.707843] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2032.707843] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 2032.918445] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5200d748-ffed-cbf0-8652-41e5941fac03, 'name': SearchDatastore_Task, 'duration_secs': 0.025277} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.918445] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.920640] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] a64253fe-4ba9-4686-810b-a26a4c29631b/a64253fe-4ba9-4686-810b-a26a4c29631b.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2032.920640] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed3b1440-e92a-409d-8a29-90b957ca8c5f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.925707] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2032.925707] env[62875]: value = "task-2179999" [ 2032.925707] env[62875]: _type = "Task" [ 2032.925707] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.936112] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2179999, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.965746] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.815s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.966442] env[62875]: ERROR nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. 
[ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Traceback (most recent call last): [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/compute/manager.py", line 2648, in _build_and_run_instance [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self.driver.spawn(context, instance, image_meta, [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] vm_ref = self.build_virtual_machine(instance, [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 2032.966442] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] for vif in network_info: [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return self._sync_wrapper(fn, *args, **kwargs) [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self.wait() [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self[:] = self._gt.wait() [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return self._exit_event.wait() [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] result = hub.switch() [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 
310, in switch [ 2032.966836] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return self.greenlet.switch() [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] result = function(*args, **kwargs) [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] return func(*args, **kwargs) [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/compute/manager.py", line 2018, in _allocate_network_async [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] raise e [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/compute/manager.py", line 1996, in _allocate_network_async [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] nwinfo = self.network_api.allocate_for_instance( [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] created_port_ids = self._update_ports_for_instance( [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] with excutils.save_and_reraise_exception(): [ 2032.967237] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] self.force_reraise() [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] raise self.value [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] updated_port = self._update_port( [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] _ensure_no_port_binding_failure(port) [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] raise exception.PortBindingFailed(port_id=port['id']) [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] nova.exception.PortBindingFailed: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. [ 2032.967678] env[62875]: ERROR nova.compute.manager [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] [ 2032.968039] env[62875]: DEBUG nova.compute.utils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. {{(pid=62875) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2032.968603] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.273s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.970240] env[62875]: INFO nova.compute.claims [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2032.972998] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Build of instance fd7ba11a-18d1-4f96-a445-eedce740b0c3 was re-scheduled: Binding failed for port e046947c-decf-4f1d-b15d-bd447d3ebc74, please check neutron logs for more information. 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 2032.973454] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Unplugging VIFs for instance {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 2032.973713] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquiring lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.973896] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Acquired lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.974101] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2033.007027] env[62875]: DEBUG oslo_vmware.api [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2179998, 'name': PowerOnVM_Task, 'duration_secs': 0.461325} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.007027] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2033.007233] env[62875]: INFO nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Took 6.92 seconds to spawn the instance on the hypervisor. 
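The PortBindingFailed traceback earlier in this section bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in these paths). As a reading aid, here is a minimal sketch of the check that helper performs, paraphrased rather than copied: the exception and its message match the log, the constant name is the one nova.network.model uses, and everything else is reduced for illustration.

```python
# Hedged sketch of the check behind "Binding failed for port ...".
# Nova inspects the Neutron port's binding:vif_type after an update; a
# value of "binding_failed" means Neutron could not bind the port.
from nova import exception

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # nova.network.model constant

def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise exception.PortBindingFailed(port_id=port['id'])
```

Because the exception surfaces inside the async network allocation, the compute manager re-schedules the build (the manager.py:2487 entry above) rather than failing the instance outright.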
[ 2033.007416] env[62875]: DEBUG nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2033.008255] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4871c79e-e9b9-4f8c-a776-8f2b79ddf96e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.012402] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.209562] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 1 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 2033.209863] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: f27770de-40f5-4d5a-8819-a62d8f9a320a] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2033.387149] env[62875]: INFO nova.scheduler.client.report [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Deleted allocations for instance 5cf1f620-d0da-4e81-8d1f-e881c47dcad1 [ 2033.435956] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2179999, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475249} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.436296] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] a64253fe-4ba9-4686-810b-a26a4c29631b/a64253fe-4ba9-4686-810b-a26a4c29631b.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2033.436438] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2033.436682] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-602f2005-3260-4014-b146-6b501c4d2ade {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.443614] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2033.443614] env[62875]: value = "task-2180000" [ 2033.443614] env[62875]: _type = "Task" [ 2033.443614] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.451823] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180000, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.502338] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2033.527929] env[62875]: INFO nova.compute.manager [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Took 33.19 seconds to build instance. 
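The paired "Waiting for the task: (returnval){...}" and "Task: {...} progress is N%" entries throughout this section come from oslo.vmware's task poller (wait_for_task / _poll_task in the file paths logged). A minimal sketch of how a caller drives it: the endpoint and credentials are placeholders, vm_ref is assumed to be a managed-object reference obtained elsewhere, and the constructor arguments shown are the common ones and may vary by oslo.vmware release.

```python
# Hedged sketch: polling a vCenter task the way the wait_for_task /
# _poll_task entries above suggest.
from oslo_vmware import api

def power_on(vm_ref):
    session = api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',  # placeholder endpoint/creds
        api_retry_count=10,
        task_poll_interval=0.5)                # interval behind "progress is N%"
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)                # blocks, logging progress until done
```

The multi-line "(returnval){ value = ... _type = "Task" }" blocks are simply the suds representation of the returned task reference being logged while the poller waits.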
[ 2033.588190] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.897867] env[62875]: DEBUG oslo_concurrency.lockutils [None req-82a0b2cd-f876-4e30-824b-1913bedd0dac tempest-InstanceActionsTestJSON-125358292 tempest-InstanceActionsTestJSON-125358292-project-member] Lock "5cf1f620-d0da-4e81-8d1f-e881c47dcad1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.330s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.954863] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180000, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0666} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.955164] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2033.955986] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599cb85e-4c14-489e-9be9-e273cf37200e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.977914] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] a64253fe-4ba9-4686-810b-a26a4c29631b/a64253fe-4ba9-4686-810b-a26a4c29631b.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2033.978558] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1480b105-958b-42c6-9351-9dd8135d204d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.001507] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2034.001507] env[62875]: value = "task-2180001" [ 2034.001507] env[62875]: _type = "Task" [ 2034.001507] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.011187] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180001, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.031488] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8ec7abe0-cec9-4eea-abc3-4e7ce9f9d2c1 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "acc78084-21e8-456c-a573-fc5e931147c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.076s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.090839] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Releasing lock "refresh_cache-fd7ba11a-18d1-4f96-a445-eedce740b0c3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.091453] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62875) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 2034.091779] env[62875]: DEBUG nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2034.092086] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2034.107404] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2034.259922] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dcd169-2938-4793-a229-62b9f9c798a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.268070] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5dd4e1-e2c3-436e-a957-75d705e6ca6b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.300229] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4d07c9-49c3-4adc-9e1d-12a7a1a2d97b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.305818] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "409b6902-f9ef-469b-a9db-4e93f764d199" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.306096] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "409b6902-f9ef-469b-a9db-4e93f764d199" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.311588] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4569ca9b-c7d8-4083-8e0e-2de4472f96c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.325471] env[62875]: DEBUG nova.compute.provider_tree [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2034.400868] env[62875]: DEBUG nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2034.512632] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180001, 'name': ReconfigVM_Task, 'duration_secs': 0.333487} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.512975] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Reconfigured VM instance instance-0000002b to attach disk [datastore2] a64253fe-4ba9-4686-810b-a26a4c29631b/a64253fe-4ba9-4686-810b-a26a4c29631b.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2034.513553] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5356d61-9f56-4acc-af9e-7d790fd50825 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.521141] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2034.521141] env[62875]: value = "task-2180002" [ 2034.521141] env[62875]: _type = "Task" [ 2034.521141] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.527852] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180002, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.534311] env[62875]: DEBUG nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2034.610104] env[62875]: DEBUG nova.network.neutron [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.713250] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2034.713435] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2034.713561] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 2034.828469] env[62875]: DEBUG nova.scheduler.client.report [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2034.925769] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.031728] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180002, 'name': Rename_Task, 'duration_secs': 0.134926} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.032092] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2035.032403] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69957f99-51c6-48a9-849f-3d9e3384d675 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.040882] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2035.040882] env[62875]: value = "task-2180003" [ 2035.040882] env[62875]: _type = "Task" [ 2035.040882] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.051136] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180003, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.053548] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.112982] env[62875]: INFO nova.compute.manager [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] [instance: fd7ba11a-18d1-4f96-a445-eedce740b0c3] Took 1.02 seconds to deallocate network for instance. [ 2035.219045] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 2035.219045] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Skipping network cache update for instance because it is Building. 
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 2035.237838] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-6f936641-750d-49ae-8beb-bca35305d10d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.237982] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-6f936641-750d-49ae-8beb-bca35305d10d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.238155] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2035.238308] env[62875]: DEBUG nova.objects.instance [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lazy-loading 'info_cache' on Instance uuid 6f936641-750d-49ae-8beb-bca35305d10d {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2035.333402] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.333951] env[62875]: DEBUG nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2035.336604] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.680s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.340019] env[62875]: DEBUG nova.objects.instance [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62875) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2035.556026] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180003, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.841601] env[62875]: DEBUG nova.compute.utils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2035.848961] env[62875]: DEBUG nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2035.849231] env[62875]: DEBUG nova.network.neutron [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2035.945263] env[62875]: DEBUG nova.policy [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52db0a44319f46939b47247136267ceb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5062c761ea34842a2f6179ae76f3465', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2036.054285] env[62875]: DEBUG oslo_vmware.api [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180003, 'name': PowerOnVM_Task, 'duration_secs': 0.522889} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.054738] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2036.055075] env[62875]: INFO nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Took 7.64 seconds to spawn the instance on the hypervisor. 
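The "Policy check for network:attach_external_network failed" entry just above is a routine soft denial: Nova asks oslo.policy whether the requesting token may attach to an external network, and a plain member token is refused. A rough sketch of such a check; the enforcer wiring is simplified, and the 'role:admin' default shown is illustrative only (Nova's real default for this rule lives in its policy definitions).

```python
# Hedged sketch of an oslo.policy check like the one logged above.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],  # from the logged credentials
         'project_id': 'e5062c761ea34842a2f6179ae76f3465'}
allowed = enforcer.authorize('network:attach_external_network',
                             {}, creds, do_raise=False)
print(allowed)  # False for a member-only token, matching the DEBUG entry
```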
[ 2036.055367] env[62875]: DEBUG nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2036.056261] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fcdbf1-1fee-45a1-b916-3ce8f3997165 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.156342] env[62875]: INFO nova.scheduler.client.report [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Deleted allocations for instance fd7ba11a-18d1-4f96-a445-eedce740b0c3 [ 2036.261644] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2036.352908] env[62875]: DEBUG nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2036.356879] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a42e6db-908d-4108-91d6-c72aa4839072 tempest-ServersAdmin275Test-1880141486 tempest-ServersAdmin275Test-1880141486-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.359457] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.449s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.361244] env[62875]: INFO nova.compute.claims [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2036.442622] env[62875]: DEBUG nova.network.neutron [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Successfully created port: 09e96105-f947-4b3d-b097-d53f32948ee2 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2036.575273] env[62875]: INFO nova.compute.manager [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Took 35.94 seconds to build instance. 
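The "acquired ... waited Ns" / "released ... held Ns" pairs that dominate this log (including the 148.699s build lock released in the next entry) are oslo.concurrency's lock accounting: lockutils times how long a caller waited for, and then held, a named lock, and logs both at DEBUG from the inner wrapper cited in each entry. A minimal sketch of the two usual entry points:

```python
# Hedged sketch of the lock usage behind the waited/held DEBUG lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    ...  # resource-tracker work runs under the named lock

# Equivalent context-manager form:
with lockutils.lock('compute_resources'):
    ...
```

Long "waited" values here (16.273s, 17.680s, 14.449s) indicate contention on the shared compute_resources lock, while the 100-second-plus "held" times on the per-instance build locks simply span the whole build-and-run of that instance.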
[ 2036.665871] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ab9332-f674-4a40-9109-fd54dd4ae9a4 tempest-MigrationsAdminTest-1593535992 tempest-MigrationsAdminTest-1593535992-project-member] Lock "fd7ba11a-18d1-4f96-a445-eedce740b0c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.699s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.837765] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.023017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "acc78084-21e8-456c-a573-fc5e931147c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.023328] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "acc78084-21e8-456c-a573-fc5e931147c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.023535] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "acc78084-21e8-456c-a573-fc5e931147c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.023711] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "acc78084-21e8-456c-a573-fc5e931147c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.023882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "acc78084-21e8-456c-a573-fc5e931147c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.026154] env[62875]: INFO nova.compute.manager [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Terminating instance [ 2037.077792] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7794211e-3a23-45a5-8288-13e3e58d6876 tempest-InstanceActionsV221TestJSON-450910855 
tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.210s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.171363] env[62875]: DEBUG nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2037.342340] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-6f936641-750d-49ae-8beb-bca35305d10d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.342549] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2037.343191] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2037.343388] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2037.365127] env[62875]: DEBUG nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2037.406402] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2037.406640] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2037.406803] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2037.410022] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2037.410022] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2037.410022] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2037.410022] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2037.410022] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2037.410362] env[62875]: DEBUG nova.virt.hardware [None 
req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2037.410362] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2037.411143] env[62875]: DEBUG nova.virt.hardware [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2037.412490] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cc2d4f-c945-494a-90a7-a6c02c0b34a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.426029] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292a28d2-8eff-448d-8500-a665b0fd8ada {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.529423] env[62875]: DEBUG nova.compute.manager [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2037.529680] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2037.530617] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785fa535-923d-40f7-bd38-9fbdba3fbabc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.540401] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2037.540655] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e530f74d-058e-465b-ad30-1135095ba9d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.548834] env[62875]: DEBUG oslo_vmware.api [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2037.548834] env[62875]: value = "task-2180004" [ 2037.548834] env[62875]: _type = "Task" [ 2037.548834] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.556629] env[62875]: DEBUG oslo_vmware.api [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2180004, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.580739] env[62875]: DEBUG nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2037.661890] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca68abc-7059-41d4-94f8-f99adb099e9d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.670360] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4715c8-66c5-46e7-b398-51974a12f10f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.707500] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd92399f-3e2e-4f9a-bb41-4549f2efba0b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.722039] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514d4915-176d-4f2b-a461-9ae17a373c62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.729112] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.737074] env[62875]: DEBUG nova.compute.provider_tree [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2037.832209] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "a64253fe-4ba9-4686-810b-a26a4c29631b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.832524] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" ::
waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.832727] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "a64253fe-4ba9-4686-810b-a26a4c29631b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.832923] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.833137] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.836195] env[62875]: INFO nova.compute.manager [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Terminating instance [ 2037.845452] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.059469] env[62875]: DEBUG oslo_vmware.api [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2180004, 'name': PowerOffVM_Task, 'duration_secs': 0.258811} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.059814] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2038.060019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2038.060316] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d29b5c34-a2dd-4580-ad06-46d630bd3d6d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.103010] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.152079] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2038.152327] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2038.152602] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Deleting the datastore file [datastore2] acc78084-21e8-456c-a573-fc5e931147c6 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2038.152817] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-558e864d-35dd-41ad-8a0b-9247884da0ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.161876] env[62875]: DEBUG oslo_vmware.api [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for the task: (returnval){ [ 2038.161876] env[62875]: value = "task-2180006" [ 2038.161876] env[62875]: _type = "Task" [ 2038.161876] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.174273] env[62875]: DEBUG oslo_vmware.api [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2180006, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.240665] env[62875]: DEBUG nova.scheduler.client.report [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2038.342190] env[62875]: DEBUG nova.compute.manager [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2038.342392] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2038.343307] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31e8d82-45e7-46cd-a6f7-0bbb6e8abb11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.351344] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2038.351698] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f786abf-2778-4f92-8b57-d95694a09307 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.358704] env[62875]: DEBUG oslo_vmware.api [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2038.358704] env[62875]: value = "task-2180007" [ 2038.358704] env[62875]: _type = "Task" [ 2038.358704] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.371150] env[62875]: DEBUG oslo_vmware.api [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180007, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.420382] env[62875]: DEBUG nova.compute.manager [req-d5958fb8-02d6-49e3-a1c9-3035efec7114 req-f537c4af-5703-41fd-bcab-3eb12e81d842 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Received event network-vif-plugged-09e96105-f947-4b3d-b097-d53f32948ee2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2038.420382] env[62875]: DEBUG oslo_concurrency.lockutils [req-d5958fb8-02d6-49e3-a1c9-3035efec7114 req-f537c4af-5703-41fd-bcab-3eb12e81d842 service nova] Acquiring lock "305aebbe-f983-4826-b8c0-9854458f7d48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.420797] env[62875]: DEBUG oslo_concurrency.lockutils [req-d5958fb8-02d6-49e3-a1c9-3035efec7114 req-f537c4af-5703-41fd-bcab-3eb12e81d842 service nova] Lock "305aebbe-f983-4826-b8c0-9854458f7d48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.424110] env[62875]: DEBUG oslo_concurrency.lockutils [req-d5958fb8-02d6-49e3-a1c9-3035efec7114 req-f537c4af-5703-41fd-bcab-3eb12e81d842 service nova] Lock "305aebbe-f983-4826-b8c0-9854458f7d48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.424110] env[62875]: DEBUG nova.compute.manager [req-d5958fb8-02d6-49e3-a1c9-3035efec7114 req-f537c4af-5703-41fd-bcab-3eb12e81d842 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] No waiting events found dispatching network-vif-plugged-09e96105-f947-4b3d-b097-d53f32948ee2 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2038.424110] env[62875]: WARNING nova.compute.manager [req-d5958fb8-02d6-49e3-a1c9-3035efec7114 req-f537c4af-5703-41fd-bcab-3eb12e81d842 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Received unexpected event network-vif-plugged-09e96105-f947-4b3d-b097-d53f32948ee2 for instance with vm_state building and task_state spawning. [ 2038.676059] env[62875]: DEBUG oslo_vmware.api [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Task: {'id': task-2180006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154217} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.676671] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2038.676953] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2038.677210] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2038.677423] env[62875]: INFO nova.compute.manager [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2038.677734] env[62875]: DEBUG oslo.service.loopingcall [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2038.677974] env[62875]: DEBUG nova.compute.manager [-] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2038.678085] env[62875]: DEBUG nova.network.neutron [-] [instance: acc78084-21e8-456c-a573-fc5e931147c6] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2038.709135] env[62875]: DEBUG nova.network.neutron [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Successfully updated port: 09e96105-f947-4b3d-b097-d53f32948ee2 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2038.746721] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.746879] env[62875]: DEBUG nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Start building networks asynchronously for instance.
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2038.750379] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.135s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.752700] env[62875]: INFO nova.compute.claims [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2038.870716] env[62875]: DEBUG oslo_vmware.api [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180007, 'name': PowerOffVM_Task, 'duration_secs': 0.352191} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.870716] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2038.870716] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2038.870921] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e632bf6-4055-4a00-aaba-20e352757e53 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.980049] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2038.980049] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2038.980049] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Deleting the datastore file [datastore2] a64253fe-4ba9-4686-810b-a26a4c29631b {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2038.980917] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-002620be-ad57-41ff-8cf9-a252d81e7fe1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.989615] env[62875]: DEBUG oslo_vmware.api [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for the task: (returnval){ [ 2038.989615] env[62875]: value = "task-2180009" [ 2038.989615] env[62875]: _type = "Task" [ 2038.989615] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.000022] env[62875]: DEBUG oslo_vmware.api [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180009, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.004150] env[62875]: DEBUG nova.compute.manager [req-585d1c60-bd64-43af-93b8-5eea5a33dbef req-28b927be-8e8a-4afb-89c7-4593e01b1ca3 service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Received event network-vif-deleted-7e5216de-a51f-4db3-b650-8adf07f7ed9c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2039.004341] env[62875]: INFO nova.compute.manager [req-585d1c60-bd64-43af-93b8-5eea5a33dbef req-28b927be-8e8a-4afb-89c7-4593e01b1ca3 service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Neutron deleted interface 7e5216de-a51f-4db3-b650-8adf07f7ed9c; detaching it from the instance and deleting it from the info cache [ 2039.004510] env[62875]: DEBUG nova.network.neutron [req-585d1c60-bd64-43af-93b8-5eea5a33dbef req-28b927be-8e8a-4afb-89c7-4593e01b1ca3 service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.215405] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-305aebbe-f983-4826-b8c0-9854458f7d48" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.215405] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-305aebbe-f983-4826-b8c0-9854458f7d48" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.215570] env[62875]: DEBUG nova.network.neutron [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2039.259652] env[62875]: DEBUG nova.compute.utils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2039.261295] env[62875]: DEBUG 
nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2039.261952] env[62875]: DEBUG nova.network.neutron [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2039.305769] env[62875]: DEBUG nova.policy [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f084cb7f15c34a5084fc916ea512429f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e44fd2f4e6e4eda8ddcdc7859a7fbd0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2039.485536] env[62875]: DEBUG nova.network.neutron [-] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.503032] env[62875]: DEBUG oslo_vmware.api [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Task: {'id': task-2180009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.339072} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.503032] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2039.503032] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2039.503032] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2039.503032] env[62875]: INFO nova.compute.manager [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2039.503273] env[62875]: DEBUG oslo.service.loopingcall [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2039.503353] env[62875]: DEBUG nova.compute.manager [-] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2039.503890] env[62875]: DEBUG nova.network.neutron [-] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2039.511190] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d28a15e-0d30-432e-826c-3d1ed77d314c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.518873] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c856770-9e9e-4fa8-9f9b-a28d4c8e2bda {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.544636] env[62875]: DEBUG nova.compute.manager [req-585d1c60-bd64-43af-93b8-5eea5a33dbef req-28b927be-8e8a-4afb-89c7-4593e01b1ca3 service nova] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Detach interface failed, port_id=7e5216de-a51f-4db3-b650-8adf07f7ed9c, reason: Instance acc78084-21e8-456c-a573-fc5e931147c6 could not be found.
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2039.685404] env[62875]: DEBUG nova.network.neutron [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Successfully created port: b429f7a6-ae5e-41e1-9212-82b051f2e35b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2039.768020] env[62875]: DEBUG nova.network.neutron [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2039.771865] env[62875]: DEBUG nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2039.990223] env[62875]: INFO nova.compute.manager [-] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Took 1.31 seconds to deallocate network for instance. [ 2039.998549] env[62875]: DEBUG nova.network.neutron [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Updating instance_info_cache with network_info: [{"id": "09e96105-f947-4b3d-b097-d53f32948ee2", "address": "fa:16:3e:80:28:28", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09e96105-f9", "ovs_interfaceid": "09e96105-f947-4b3d-b097-d53f32948ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.039470] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7594d67-04d2-4e6e-aee7-21d10fcc4779 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.047718] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32ae60d-69bf-40ba-8128-4a41e709d728 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 2040.080665] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1245632f-99f5-454c-a1f0-9ad7588ae63e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.088722] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a932f6-0fa9-4e0b-b5ed-c63d7b456be1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.102663] env[62875]: DEBUG nova.compute.provider_tree [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2040.287403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "1230e54c-701a-4406-95bd-14e32914bc8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.287635] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "1230e54c-701a-4406-95bd-14e32914bc8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.373738] env[62875]: DEBUG nova.network.neutron [-] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.497031] env[62875]: DEBUG nova.compute.manager [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Received event network-changed-09e96105-f947-4b3d-b097-d53f32948ee2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2040.497031] env[62875]: DEBUG nova.compute.manager [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Refreshing instance network info cache due to event network-changed-09e96105-f947-4b3d-b097-d53f32948ee2.
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2040.497172] env[62875]: DEBUG oslo_concurrency.lockutils [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] Acquiring lock "refresh_cache-305aebbe-f983-4826-b8c0-9854458f7d48" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.498128] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.500728] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-305aebbe-f983-4826-b8c0-9854458f7d48" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.501009] env[62875]: DEBUG nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Instance network_info: |[{"id": "09e96105-f947-4b3d-b097-d53f32948ee2", "address": "fa:16:3e:80:28:28", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09e96105-f9", "ovs_interfaceid": "09e96105-f947-4b3d-b097-d53f32948ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2040.501580] env[62875]: DEBUG oslo_concurrency.lockutils [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] Acquired lock "refresh_cache-305aebbe-f983-4826-b8c0-9854458f7d48" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.501775] env[62875]: DEBUG nova.network.neutron [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Refreshing network info cache for port 09e96105-f947-4b3d-b097-d53f32948ee2 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2040.503196] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 
tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:28:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09e96105-f947-4b3d-b097-d53f32948ee2', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2040.510789] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating folder: Project (e5062c761ea34842a2f6179ae76f3465). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2040.512050] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c51c21f-a16c-4809-918a-29bedde924a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.523914] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created folder: Project (e5062c761ea34842a2f6179ae76f3465) in parent group-v444854. [ 2040.524146] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating folder: Instances. Parent ref: group-v444879. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2040.524369] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd841d9e-b14e-444b-bb1a-59770d22da39 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.533839] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created folder: Instances in parent group-v444879. [ 2040.534089] env[62875]: DEBUG oslo.service.loopingcall [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2040.534276] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2040.534473] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03afcb0c-be03-4134-b407-fa7760912307 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.554462] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2040.554462] env[62875]: value = "task-2180012" [ 2040.554462] env[62875]: _type = "Task" [ 2040.554462] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.563990] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180012, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.605872] env[62875]: DEBUG nova.scheduler.client.report [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2040.788339] env[62875]: DEBUG nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2040.821820] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2040.822066] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2040.822268] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2040.822405] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} 
[ 2040.824019] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2040.824019] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2040.824019] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2040.824019] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2040.824019] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2040.824824] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2040.824824] env[62875]: DEBUG nova.virt.hardware [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2040.824824] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f5c4b4-8ec2-4124-b8d6-beb3fa266613 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.835016] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd337ba3-f39c-4fa2-8e43-84b94afeeadc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.877026] env[62875]: INFO nova.compute.manager [-] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Took 1.37 seconds to deallocate network for instance. [ 2041.064520] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180012, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.110437] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.111007] env[62875]: DEBUG nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2041.113522] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.467s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.113855] env[62875]: DEBUG nova.objects.instance [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lazy-loading 'resources' on Instance uuid 6f936641-750d-49ae-8beb-bca35305d10d {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2041.312278] env[62875]: DEBUG nova.network.neutron [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Updated VIF entry in instance network info cache for port 09e96105-f947-4b3d-b097-d53f32948ee2. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2041.312650] env[62875]: DEBUG nova.network.neutron [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Updating instance_info_cache with network_info: [{"id": "09e96105-f947-4b3d-b097-d53f32948ee2", "address": "fa:16:3e:80:28:28", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09e96105-f9", "ovs_interfaceid": "09e96105-f947-4b3d-b097-d53f32948ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.331854] env[62875]: DEBUG nova.compute.manager [req-3accdbbb-410a-4f2c-9888-c839b7a1ba7c req-194ad9f4-00d0-4879-899a-07b553bc9a6d service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Received event network-vif-plugged-b429f7a6-ae5e-41e1-9212-82b051f2e35b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2041.332090] env[62875]: DEBUG oslo_concurrency.lockutils [req-3accdbbb-410a-4f2c-9888-c839b7a1ba7c req-194ad9f4-00d0-4879-899a-07b553bc9a6d service nova] Acquiring lock "c1e107cd-5c03-405f-bdae-3281dc4844d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.332304] env[62875]: DEBUG oslo_concurrency.lockutils [req-3accdbbb-410a-4f2c-9888-c839b7a1ba7c req-194ad9f4-00d0-4879-899a-07b553bc9a6d service nova] Lock "c1e107cd-5c03-405f-bdae-3281dc4844d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.332473] env[62875]: DEBUG oslo_concurrency.lockutils [req-3accdbbb-410a-4f2c-9888-c839b7a1ba7c req-194ad9f4-00d0-4879-899a-07b553bc9a6d service nova] Lock "c1e107cd-5c03-405f-bdae-3281dc4844d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.332642] env[62875]: DEBUG nova.compute.manager [req-3accdbbb-410a-4f2c-9888-c839b7a1ba7c req-194ad9f4-00d0-4879-899a-07b553bc9a6d service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] No waiting events found dispatching network-vif-plugged-b429f7a6-ae5e-41e1-9212-82b051f2e35b {{(pid=62875) pop_instance_event
/opt/stack/nova/nova/compute/manager.py:322}} [ 2041.332805] env[62875]: WARNING nova.compute.manager [req-3accdbbb-410a-4f2c-9888-c839b7a1ba7c req-194ad9f4-00d0-4879-899a-07b553bc9a6d service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Received unexpected event network-vif-plugged-b429f7a6-ae5e-41e1-9212-82b051f2e35b for instance with vm_state building and task_state spawning. [ 2041.365917] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "45403db3-ff20-42d3-8a37-8db671d8c1fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.366176] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.383181] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.397252] env[62875]: DEBUG nova.network.neutron [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Successfully updated port: b429f7a6-ae5e-41e1-9212-82b051f2e35b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2041.565389] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180012, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.617029] env[62875]: DEBUG nova.compute.utils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2041.621741] env[62875]: DEBUG nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2041.621936] env[62875]: DEBUG nova.network.neutron [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2041.662396] env[62875]: DEBUG nova.policy [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'daec14670b7246d1847a9fa8430402cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d6fc1be135a4350ab17e4e4d59f0617', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2041.816852] env[62875]: DEBUG oslo_concurrency.lockutils [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] Releasing lock "refresh_cache-305aebbe-f983-4826-b8c0-9854458f7d48" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.816852] env[62875]: DEBUG nova.compute.manager [req-3f9c2500-71f5-4d10-9d58-9a5a2753062f req-eb6974d8-bfca-43e6-9e9e-cca18eeb6235 service nova] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Received event network-vif-deleted-ba667cf6-8e2a-48bd-8e30-23d4df08e82b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2041.844933] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28ad05a-31a8-4b65-81d2-16f7484a378c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.854974] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc202b9-d7e1-43cb-be86-b61d807dd2fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.887529] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ce27f5-c57d-4302-9c46-8103f4017d8a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.894920] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a40b722-982e-4768-8898-1dabbcc77224 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.899104] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "refresh_cache-c1e107cd-5c03-405f-bdae-3281dc4844d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.899255] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 
tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquired lock "refresh_cache-c1e107cd-5c03-405f-bdae-3281dc4844d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.899417] env[62875]: DEBUG nova.network.neutron [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2041.910712] env[62875]: DEBUG nova.compute.provider_tree [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2041.960802] env[62875]: DEBUG nova.network.neutron [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2041.972053] env[62875]: DEBUG nova.network.neutron [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Successfully created port: 1e776d51-5ada-4779-87c3-320b11662f1d {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2042.065721] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180012, 'name': CreateVM_Task, 'duration_secs': 1.351149} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.065934] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2042.067029] env[62875]: DEBUG oslo_vmware.service [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c91043-b7c6-4183-8c96-feead5841302 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.072786] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2042.073029] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2042.073406] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2042.073711] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a73b1e32-c4f5-4fe0-ad70-efc96e843f0a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.078038] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2042.078038] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524db19f-79d9-f550-ef37-7c6d53071303" [ 2042.078038] env[62875]: _type = "Task" [ 2042.078038] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.095162] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.095459] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2042.095723] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2042.095901] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2042.096120] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2042.096383] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f66d492b-35f4-46dc-8703-ec72bcfad1df {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.104379] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2042.104594] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2042.105354] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63013ce-5502-4806-b6b0-76c983dd4e7d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.111569] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01091492-9226-49bf-aa41-432ba66c4a14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.116649] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2042.116649] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521404e0-95ad-e222-44c0-413cd05071e4" [ 2042.116649] env[62875]: _type = "Task" [ 2042.116649] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.122349] env[62875]: DEBUG nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2042.129442] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521404e0-95ad-e222-44c0-413cd05071e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.347551] env[62875]: DEBUG nova.network.neutron [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Updating instance_info_cache with network_info: [{"id": "b429f7a6-ae5e-41e1-9212-82b051f2e35b", "address": "fa:16:3e:5b:70:f1", "network": {"id": "5f3abcb9-ec79-4068-80e2-985f16699f95", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1520423285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e44fd2f4e6e4eda8ddcdc7859a7fbd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb429f7a6-ae", "ovs_interfaceid": "b429f7a6-ae5e-41e1-9212-82b051f2e35b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.415938] env[62875]: DEBUG nova.scheduler.client.report [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2042.628145] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Preparing fetch location {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2042.628454] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating directory with path [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2042.628739] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc8dbc74-309e-4840-a39e-2734658f07cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.645543] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created directory with path [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2042.645986] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Fetch image to [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2042.646213] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Downloading image file data a9637bcc-4de8-4ea1-be59-4c697becf2a7 to [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62875) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2042.647028] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54370ae9-bc63-4c72-9064-b144ab816dd9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.654710] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0227ec-d805-4424-864a-8bcb8dfbd933 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.663915] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96887ea8-f7fa-41f3-8b61-d50f62345749 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.694975] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37bca343-be7b-4f3b-ac78-4ab29ecbf3ae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.700746] env[62875]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c92a4035-65b1-47e6-a150-8ae36667cc08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.785984] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Downloading image file data a9637bcc-4de8-4ea1-be59-4c697becf2a7 to the data store datastore1 {{(pid=62875) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2042.834762] env[62875]: DEBUG oslo_vmware.rw_handles [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62875) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2042.890082] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Releasing lock "refresh_cache-c1e107cd-5c03-405f-bdae-3281dc4844d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.890400] env[62875]: DEBUG nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Instance network_info: |[{"id": "b429f7a6-ae5e-41e1-9212-82b051f2e35b", "address": "fa:16:3e:5b:70:f1", "network": {"id": "5f3abcb9-ec79-4068-80e2-985f16699f95", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1520423285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e44fd2f4e6e4eda8ddcdc7859a7fbd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb429f7a6-ae", "ovs_interfaceid": "b429f7a6-ae5e-41e1-9212-82b051f2e35b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2042.890799] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:70:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b429f7a6-ae5e-41e1-9212-82b051f2e35b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2042.898401] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Creating folder: Project (1e44fd2f4e6e4eda8ddcdc7859a7fbd0). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2042.899704] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6636ac58-9f1c-45d6-9e4d-12e0b3d9a42a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.912359] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Created folder: Project (1e44fd2f4e6e4eda8ddcdc7859a7fbd0) in parent group-v444854. [ 2042.912649] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Creating folder: Instances. Parent ref: group-v444882. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2042.912977] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6dda3cc-d857-438e-844b-0b0447eefe02 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.920913] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.807s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.926503] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.529s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.926820] env[62875]: DEBUG nova.objects.instance [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lazy-loading 'resources' on Instance uuid c6de797f-03f7-4dca-9c6a-e7b840990be6 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2042.928721] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Created folder: Instances in parent group-v444882. [ 2042.930831] env[62875]: DEBUG oslo.service.loopingcall [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2042.932720] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2042.932973] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f704687-a9d2-4112-b15c-45aaf0bd9187 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.949595] env[62875]: INFO nova.scheduler.client.report [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Deleted allocations for instance 6f936641-750d-49ae-8beb-bca35305d10d [ 2042.956544] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2042.956544] env[62875]: value = "task-2180015" [ 2042.956544] env[62875]: _type = "Task" [ 2042.956544] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.968711] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180015, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.137260] env[62875]: DEBUG nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2043.164013] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2043.164318] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2043.164484] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2043.164668] env[62875]: 
DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2043.164814] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2043.164981] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2043.165221] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2043.165498] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2043.165689] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2043.165931] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2043.166181] env[62875]: DEBUG nova.virt.hardware [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2043.167138] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa9cc13-8771-4c55-a162-8a58e29c2ad8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.178726] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5729c4a7-4cf3-4bd1-86fa-c02640263f9b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.370526] env[62875]: DEBUG nova.compute.manager [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] [instance: 
c1e107cd-5c03-405f-bdae-3281dc4844d5] Received event network-changed-b429f7a6-ae5e-41e1-9212-82b051f2e35b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2043.370762] env[62875]: DEBUG nova.compute.manager [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Refreshing instance network info cache due to event network-changed-b429f7a6-ae5e-41e1-9212-82b051f2e35b. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2043.371171] env[62875]: DEBUG oslo_concurrency.lockutils [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] Acquiring lock "refresh_cache-c1e107cd-5c03-405f-bdae-3281dc4844d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2043.371247] env[62875]: DEBUG oslo_concurrency.lockutils [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] Acquired lock "refresh_cache-c1e107cd-5c03-405f-bdae-3281dc4844d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2043.375035] env[62875]: DEBUG nova.network.neutron [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Refreshing network info cache for port b429f7a6-ae5e-41e1-9212-82b051f2e35b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2043.460307] env[62875]: DEBUG oslo_concurrency.lockutils [None req-33676038-e65c-43e9-a70c-2fed5beeb1fd tempest-ServersAdmin275Test-951525081 tempest-ServersAdmin275Test-951525081-project-member] Lock "6f936641-750d-49ae-8beb-bca35305d10d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.033s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.462240] env[62875]: DEBUG nova.network.neutron [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Successfully updated port: 1e776d51-5ada-4779-87c3-320b11662f1d {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2043.481433] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180015, 'name': CreateVM_Task, 'duration_secs': 0.341442} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.485468] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2043.486390] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2043.486579] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2043.486892] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2043.487174] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f4492a8-2c5a-4511-be1e-52762cb626f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.496090] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2043.496090] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520d63ad-4e60-9015-78b1-ec81b749d61c" [ 2043.496090] env[62875]: _type = "Task" [ 2043.496090] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.512518] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520d63ad-4e60-9015-78b1-ec81b749d61c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.524493] env[62875]: DEBUG oslo_vmware.rw_handles [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Completed reading data from the image iterator. 
{{(pid=62875) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2043.524753] env[62875]: DEBUG oslo_vmware.rw_handles [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2043.657246] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Downloaded image file data a9637bcc-4de8-4ea1-be59-4c697becf2a7 to vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62875) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2043.659705] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Caching image {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2043.659976] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copying Virtual Disk [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk to [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2043.660296] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3484c8ce-fc61-4687-910a-bf456c1d3847 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.669152] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2043.669152] env[62875]: value = "task-2180016" [ 2043.669152] env[62875]: _type = "Task" [ 2043.669152] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.678182] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180016, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.694011] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbd0c00-f46a-45ee-a166-63ceab32bcee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.701898] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7671b7e2-6fba-418b-91f2-178a2bed8f65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.509723] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "refresh_cache-37ae8e69-f953-4846-8a21-fed697ea575a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.510019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquired lock "refresh_cache-37ae8e69-f953-4846-8a21-fed697ea575a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.510019] env[62875]: DEBUG nova.network.neutron [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2044.517588] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8258ecca-54f6-4d2b-9af5-db97b64a9606 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.529983] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520d63ad-4e60-9015-78b1-ec81b749d61c, 'name': SearchDatastore_Task, 'duration_secs': 0.010154} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.534304] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2044.534544] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2044.534770] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.534916] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.535104] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2044.535653] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705735} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.536228] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7e0af66-5106-45db-95ab-ffd94011ab35 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.538022] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copied Virtual Disk [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk to [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2044.538214] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleting the datastore file [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7/tmp-sparse.vmdk {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2044.540250] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fde39d5d-1ac2-419a-aa52-4eb98dde85dc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.543332] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01a0d1e-fa62-46f1-9557-24845e4e85f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.548335] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2044.548507] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2044.549469] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5caeec6-e034-4a61-8390-36b12c9569c6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.561848] env[62875]: DEBUG nova.compute.provider_tree [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2044.563285] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2044.563285] env[62875]: value = "task-2180017" [ 2044.563285] env[62875]: _type = "Task" [ 2044.563285] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.569484] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2044.569484] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527f5681-db3c-2883-c76b-3fe32f5ed721" [ 2044.569484] env[62875]: _type = "Task" [ 2044.569484] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.575977] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180017, 'name': DeleteDatastoreFile_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.576139] env[62875]: DEBUG nova.network.neutron [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2044.582380] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527f5681-db3c-2883-c76b-3fe32f5ed721, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.759096] env[62875]: DEBUG nova.network.neutron [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Updated VIF entry in instance network info cache for port b429f7a6-ae5e-41e1-9212-82b051f2e35b. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2044.759545] env[62875]: DEBUG nova.network.neutron [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Updating instance_info_cache with network_info: [{"id": "b429f7a6-ae5e-41e1-9212-82b051f2e35b", "address": "fa:16:3e:5b:70:f1", "network": {"id": "5f3abcb9-ec79-4068-80e2-985f16699f95", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1520423285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e44fd2f4e6e4eda8ddcdc7859a7fbd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb429f7a6-ae", "ovs_interfaceid": "b429f7a6-ae5e-41e1-9212-82b051f2e35b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2044.769973] env[62875]: DEBUG nova.network.neutron [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Updating instance_info_cache with network_info: [{"id": "1e776d51-5ada-4779-87c3-320b11662f1d", "address": "fa:16:3e:9b:30:50", "network": {"id": "80ef4e5d-45f4-48c0-85f5-828f90a0228c", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-165871578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d6fc1be135a4350ab17e4e4d59f0617", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e776d51-5a", "ovs_interfaceid": "1e776d51-5ada-4779-87c3-320b11662f1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.065251] env[62875]: DEBUG nova.scheduler.client.report [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2045.079993] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022457} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.083847] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2045.084077] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Moving file from [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9/a9637bcc-4de8-4ea1-be59-4c697becf2a7 to [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7. {{(pid=62875) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 2045.084337] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527f5681-db3c-2883-c76b-3fe32f5ed721, 'name': SearchDatastore_Task, 'duration_secs': 0.01746} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.084524] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-33642e57-b21d-481d-abb1-fa79e5a53506 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.086973] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcd18904-266d-4e0c-8cdf-070da7288ec2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.093440] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2045.093440] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52945355-c1e9-8ec5-6c5a-609f7d898f31" [ 2045.093440] env[62875]: _type = "Task" [ 2045.093440] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.094499] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2045.094499] env[62875]: value = "task-2180018" [ 2045.094499] env[62875]: _type = "Task" [ 2045.094499] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.104068] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52945355-c1e9-8ec5-6c5a-609f7d898f31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.107013] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180018, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.262125] env[62875]: DEBUG oslo_concurrency.lockutils [req-ab48459d-fc61-4a11-b47d-03af5ffe6f31 req-c04a303c-92e8-4ae7-b840-5f9876353233 service nova] Releasing lock "refresh_cache-c1e107cd-5c03-405f-bdae-3281dc4844d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.273054] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Releasing lock "refresh_cache-37ae8e69-f953-4846-8a21-fed697ea575a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.273244] env[62875]: DEBUG nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Instance network_info: |[{"id": "1e776d51-5ada-4779-87c3-320b11662f1d", "address": "fa:16:3e:9b:30:50", "network": {"id": "80ef4e5d-45f4-48c0-85f5-828f90a0228c", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-165871578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d6fc1be135a4350ab17e4e4d59f0617", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e776d51-5a", "ovs_interfaceid": "1e776d51-5ada-4779-87c3-320b11662f1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2045.273683] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:30:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721e64ee-fc02-4eb5-9c8c-ea55647a1b92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e776d51-5ada-4779-87c3-320b11662f1d', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2045.280979] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Creating folder: Project (2d6fc1be135a4350ab17e4e4d59f0617). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2045.281253] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-565dffa4-7c8d-4e02-a9b5-755c8d209096 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.290772] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Created folder: Project (2d6fc1be135a4350ab17e4e4d59f0617) in parent group-v444854. [ 2045.290939] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Creating folder: Instances. Parent ref: group-v444885. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2045.291162] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09ce2d63-d478-4c37-b989-a51d814744b3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.299649] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Created folder: Instances in parent group-v444885. [ 2045.299879] env[62875]: DEBUG oslo.service.loopingcall [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2045.300061] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2045.300252] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efbc2408-4de5-4c12-a9bc-3891236eafee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.318328] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2045.318328] env[62875]: value = "task-2180021" [ 2045.318328] env[62875]: _type = "Task" [ 2045.318328] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.325171] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180021, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.393017] env[62875]: DEBUG nova.compute.manager [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Received event network-vif-plugged-1e776d51-5ada-4779-87c3-320b11662f1d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2045.393235] env[62875]: DEBUG oslo_concurrency.lockutils [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] Acquiring lock "37ae8e69-f953-4846-8a21-fed697ea575a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.393425] env[62875]: DEBUG oslo_concurrency.lockutils [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] Lock "37ae8e69-f953-4846-8a21-fed697ea575a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.393599] env[62875]: DEBUG oslo_concurrency.lockutils [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] Lock "37ae8e69-f953-4846-8a21-fed697ea575a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.393782] env[62875]: DEBUG nova.compute.manager [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] No waiting events found dispatching network-vif-plugged-1e776d51-5ada-4779-87c3-320b11662f1d {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2045.393949] env[62875]: WARNING nova.compute.manager [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Received unexpected event network-vif-plugged-1e776d51-5ada-4779-87c3-320b11662f1d for instance with vm_state building and task_state spawning. 
[ 2045.394118] env[62875]: DEBUG nova.compute.manager [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Received event network-changed-1e776d51-5ada-4779-87c3-320b11662f1d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2045.394291] env[62875]: DEBUG nova.compute.manager [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Refreshing instance network info cache due to event network-changed-1e776d51-5ada-4779-87c3-320b11662f1d. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2045.394478] env[62875]: DEBUG oslo_concurrency.lockutils [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] Acquiring lock "refresh_cache-37ae8e69-f953-4846-8a21-fed697ea575a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2045.394615] env[62875]: DEBUG oslo_concurrency.lockutils [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] Acquired lock "refresh_cache-37ae8e69-f953-4846-8a21-fed697ea575a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.394781] env[62875]: DEBUG nova.network.neutron [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Refreshing network info cache for port 1e776d51-5ada-4779-87c3-320b11662f1d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2045.573508] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.647s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.576150] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.116s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.578073] env[62875]: INFO nova.compute.claims [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2045.599547] env[62875]: INFO nova.scheduler.client.report [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Deleted allocations for instance c6de797f-03f7-4dca-9c6a-e7b840990be6 [ 2045.612594] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52945355-c1e9-8ec5-6c5a-609f7d898f31, 'name': SearchDatastore_Task, 
'duration_secs': 0.010992} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.613085] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.613361] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c1e107cd-5c03-405f-bdae-3281dc4844d5/c1e107cd-5c03-405f-bdae-3281dc4844d5.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2045.613613] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b014df9-8de8-476a-91ef-6b7e283d2af7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.619292] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180018, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.023962} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.619871] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] File moved {{(pid=62875) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 2045.620097] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Cleaning up location [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2045.620261] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleting the datastore file [datastore1] vmware_temp/2dbfc6cb-e920-43e8-8576-e3f7110a27e9 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2045.620502] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bc88a91-a408-4d1f-b7cd-43009d649f0c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.624019] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2045.624019] env[62875]: value = "task-2180022" [ 2045.624019] env[62875]: _type = "Task" [ 2045.624019] 
env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.628853] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2045.628853] env[62875]: value = "task-2180023" [ 2045.628853] env[62875]: _type = "Task" [ 2045.628853] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.635364] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180022, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.640207] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.829345] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180021, 'name': CreateVM_Task, 'duration_secs': 0.38186} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.829553] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2045.830372] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2045.830571] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.830907] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2045.831239] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7009c0e6-468e-42fb-b931-5211f6a44fcd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.836887] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 
tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2045.836887] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bee733-bf2a-7ff1-0c7b-1b72465d4a88" [ 2045.836887] env[62875]: _type = "Task" [ 2045.836887] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.846328] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bee733-bf2a-7ff1-0c7b-1b72465d4a88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.118387] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00fc6176-65a9-4e7b-b6d4-177562b7ecf0 tempest-ServerGroupTestJSON-1653833888 tempest-ServerGroupTestJSON-1653833888-project-member] Lock "c6de797f-03f7-4dca-9c6a-e7b840990be6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.237s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.143959] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180022, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.144583] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029907} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.144966] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2046.147124] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-536e4977-8999-459e-b57e-1de139313910 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.154410] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2046.154410] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220cba5-8755-da7b-83fd-5594dd944ac6" [ 2046.154410] env[62875]: _type = "Task" [ 2046.154410] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.164204] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220cba5-8755-da7b-83fd-5594dd944ac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.270722] env[62875]: DEBUG nova.network.neutron [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Updated VIF entry in instance network info cache for port 1e776d51-5ada-4779-87c3-320b11662f1d. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2046.271139] env[62875]: DEBUG nova.network.neutron [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Updating instance_info_cache with network_info: [{"id": "1e776d51-5ada-4779-87c3-320b11662f1d", "address": "fa:16:3e:9b:30:50", "network": {"id": "80ef4e5d-45f4-48c0-85f5-828f90a0228c", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-165871578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d6fc1be135a4350ab17e4e4d59f0617", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e776d51-5a", "ovs_interfaceid": "1e776d51-5ada-4779-87c3-320b11662f1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2046.347924] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bee733-bf2a-7ff1-0c7b-1b72465d4a88, 'name': SearchDatastore_Task, 'duration_secs': 0.059601} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.348238] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.348474] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2046.348706] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.348852] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.349057] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2046.349312] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fcb6008-9cb9-4c68-b569-586cfb7c0990 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.356814] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2046.356986] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2046.357649] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32aa2a2a-1c81-4dc2-a81e-781c5a42fe86 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.362444] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2046.362444] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b2fc94-370b-92cb-0207-0dc3086cfab3" [ 2046.362444] env[62875]: _type = "Task" [ 2046.362444] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.369258] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b2fc94-370b-92cb-0207-0dc3086cfab3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.639068] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180022, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53686} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.639372] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c1e107cd-5c03-405f-bdae-3281dc4844d5/c1e107cd-5c03-405f-bdae-3281dc4844d5.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2046.639711] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2046.639964] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08cd8870-f82f-469c-8f51-3721f3c7d369 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.647263] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2046.647263] env[62875]: value = "task-2180024" [ 2046.647263] env[62875]: _type = "Task" [ 2046.647263] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.655106] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.665564] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220cba5-8755-da7b-83fd-5594dd944ac6, 'name': SearchDatastore_Task, 'duration_secs': 0.012198} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.665817] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.666055] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 305aebbe-f983-4826-b8c0-9854458f7d48/305aebbe-f983-4826-b8c0-9854458f7d48.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2046.666311] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db5be5ca-ea5d-4258-af96-d7c17f44a927 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.675044] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2046.675044] env[62875]: value = "task-2180025" [ 2046.675044] env[62875]: _type = "Task" [ 2046.675044] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.681629] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180025, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.775340] env[62875]: DEBUG oslo_concurrency.lockutils [req-2127971d-1b75-450f-afd0-294f6413814a req-f1b80a1c-5823-4879-ba68-c04c6c80f01f service nova] Releasing lock "refresh_cache-37ae8e69-f953-4846-8a21-fed697ea575a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.831478] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82db5f0-4008-41a1-8dbf-ac6982e2432f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.840090] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1106dc88-05f8-4379-b7ea-8e71e6b203c4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.877150] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3680e80-ebfd-4fbe-9027-f713b622c01e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.888639] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c6c250-38bd-4804-8168-621a7e771019 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.892759] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b2fc94-370b-92cb-0207-0dc3086cfab3, 'name': SearchDatastore_Task, 'duration_secs': 0.009044} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.893970] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b7658ec-4251-4855-b396-667e5df29a76 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.904695] env[62875]: DEBUG nova.compute.provider_tree [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2046.909375] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2046.909375] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526803d1-4bc4-72e1-359f-1eee2c098f54" [ 2046.909375] env[62875]: _type = "Task" [ 2046.909375] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.917406] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526803d1-4bc4-72e1-359f-1eee2c098f54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.156670] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072192} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.156931] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2047.157756] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9081fc-7783-444a-9d68-051c3d0e66e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.179473] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] c1e107cd-5c03-405f-bdae-3281dc4844d5/c1e107cd-5c03-405f-bdae-3281dc4844d5.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2047.180076] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72d6fe11-f1a1-4f09-bd5e-5f6b2312047b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.202548] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468582} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.203696] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 305aebbe-f983-4826-b8c0-9854458f7d48/305aebbe-f983-4826-b8c0-9854458f7d48.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2047.203920] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2047.204229] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2047.204229] env[62875]: value = "task-2180026" [ 2047.204229] env[62875]: _type = "Task" [ 2047.204229] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.204408] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fba1459-36e5-42f6-8fe3-819bf4159c60 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.213855] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180026, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.214999] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2047.214999] env[62875]: value = "task-2180027" [ 2047.214999] env[62875]: _type = "Task" [ 2047.214999] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.221912] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180027, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.408467] env[62875]: DEBUG nova.scheduler.client.report [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2047.421378] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526803d1-4bc4-72e1-359f-1eee2c098f54, 'name': SearchDatastore_Task, 'duration_secs': 0.018614} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.421643] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.421892] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 37ae8e69-f953-4846-8a21-fed697ea575a/37ae8e69-f953-4846-8a21-fed697ea575a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2047.422144] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38646bf6-8c10-4226-81f9-569d9d71e578 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.429348] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2047.429348] env[62875]: value = "task-2180028" [ 2047.429348] env[62875]: _type = "Task" [ 2047.429348] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.438540] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180028, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.717038] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180026, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.725794] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060773} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.726119] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2047.726951] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c2ca7c-0fcd-486f-b7cd-a8e950e1537f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.749872] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 305aebbe-f983-4826-b8c0-9854458f7d48/305aebbe-f983-4826-b8c0-9854458f7d48.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2047.750273] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-409e2d47-8140-4fef-b167-4f23315eccfb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.773379] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2047.773379] env[62875]: value = "task-2180029" [ 2047.773379] env[62875]: _type = "Task" [ 2047.773379] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.781425] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180029, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.916822] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.917467] env[62875]: DEBUG nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2047.920570] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.521s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.922300] env[62875]: INFO nova.compute.claims [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2047.941356] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180028, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.217093] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180026, 'name': ReconfigVM_Task, 'duration_secs': 0.722502} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.217424] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Reconfigured VM instance instance-0000002d to attach disk [datastore2] c1e107cd-5c03-405f-bdae-3281dc4844d5/c1e107cd-5c03-405f-bdae-3281dc4844d5.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2048.218164] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b386f24f-450e-434c-a506-722d901a4a11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.226949] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2048.226949] env[62875]: value = "task-2180030" [ 2048.226949] env[62875]: _type = "Task" [ 2048.226949] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.235932] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180030, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.282874] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180029, 'name': ReconfigVM_Task, 'duration_secs': 0.316003} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.283377] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 305aebbe-f983-4826-b8c0-9854458f7d48/305aebbe-f983-4826-b8c0-9854458f7d48.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2048.284440] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d40fad68-aacf-4e25-a223-41d871d9d39e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.291029] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2048.291029] env[62875]: value = "task-2180031" [ 2048.291029] env[62875]: _type = "Task" [ 2048.291029] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.301920] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180031, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.428430] env[62875]: DEBUG nova.compute.utils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2048.432278] env[62875]: DEBUG nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2048.432424] env[62875]: DEBUG nova.network.neutron [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2048.449017] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180028, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680819} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.449017] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 37ae8e69-f953-4846-8a21-fed697ea575a/37ae8e69-f953-4846-8a21-fed697ea575a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2048.449017] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2048.449017] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-886ac558-959a-4227-a0ce-566e3adffba8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.455844] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2048.455844] env[62875]: value = "task-2180032" [ 2048.455844] env[62875]: _type = "Task" [ 2048.455844] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.465082] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180032, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.506169] env[62875]: DEBUG nova.policy [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1627c54e8e7f4712b9dcd174f991811e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '770eddfb80d943c7a34a3d9a60845079', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2048.737591] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180030, 'name': Rename_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.801863] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180031, 'name': Rename_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.933341] env[62875]: DEBUG nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2048.968543] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180032, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.143042] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4417b36-ad45-4339-9eb3-b36c94f063ae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.150847] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab1066d-cf7b-463e-9339-6c96287fbaa4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.156104] env[62875]: DEBUG nova.network.neutron [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Successfully created port: 45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2049.186250] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1a6890-b872-468d-a0a6-5805008d4a40 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.194616] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de956de-1e99-4bd2-acdd-cc5000594066 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.208034] env[62875]: DEBUG nova.compute.provider_tree [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2049.239264] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180030, 'name': Rename_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.301850] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180031, 'name': Rename_Task, 'duration_secs': 0.872169} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.302107] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2049.302346] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51cab571-6424-4181-b1aa-094223271e42 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.308512] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2049.308512] env[62875]: value = "task-2180033" [ 2049.308512] env[62875]: _type = "Task" [ 2049.308512] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.316086] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.469631] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180032, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.980255} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.469992] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2049.474851] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5d8266-f91f-4105-8abe-304234f4b190 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.505043] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 37ae8e69-f953-4846-8a21-fed697ea575a/37ae8e69-f953-4846-8a21-fed697ea575a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2049.505306] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b56604a1-5916-4c63-b99c-2579d175006a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.528562] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2049.528562] env[62875]: value = "task-2180034" [ 2049.528562] env[62875]: _type = "Task" [ 2049.528562] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.541465] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180034, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.737713] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180030, 'name': Rename_Task, 'duration_secs': 1.309007} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.738062] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2049.738321] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98c3889b-5c2f-4226-87b9-24b765adc145 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.741700] env[62875]: ERROR nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [req-92bd654f-f80d-476c-891c-9cee4cfab2ce] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-92bd654f-f80d-476c-891c-9cee4cfab2ce"}]} [ 2049.750027] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2049.750027] env[62875]: value = "task-2180035" [ 2049.750027] env[62875]: _type = "Task" [ 2049.750027] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.760400] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180035, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.764324] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2049.787211] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2049.787559] env[62875]: DEBUG nova.compute.provider_tree [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2049.807789] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2049.819816] env[62875]: DEBUG oslo_vmware.api [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180033, 'name': PowerOnVM_Task, 'duration_secs': 0.472401} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.820121] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2049.820639] env[62875]: INFO nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Took 12.45 seconds to spawn the instance on the hypervisor. 
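[annotation] The ERROR followed immediately by "Refreshing inventories" above is the Placement API's optimistic concurrency control at work: every inventory write carries the resource-provider generation the writer last read, a concurrent writer bumps that generation, and the stale write is rejected with 409 placement.concurrent_update, after which the client re-reads the provider state and retries. A minimal sketch of that read-retry loop against the raw REST API follows; the placement_url, token, and pinned microversion are assumptions for illustration, and Nova's real implementation lives in nova.scheduler.client.report, not here.

    import requests

    def put_inventories(placement_url, token, rp_uuid, inventories, retries=3):
        # Hedged sketch, not Nova's code: PUT the inventory together with the
        # generation we last saw; on 409 placement.concurrent_update, re-read
        # the provider (picking up the new generation) and try again.
        headers = {"X-Auth-Token": token,
                   "OpenStack-API-Version": "placement 1.26"}  # assumed version
        base = "%s/resource_providers/%s" % (placement_url, rp_uuid)
        for _ in range(retries):
            generation = requests.get(base, headers=headers).json()["generation"]
            resp = requests.put(base + "/inventories", headers=headers,
                                json={"resource_provider_generation": generation,
                                      "inventories": inventories})
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409: someone else (e.g. a concurrent allocation) bumped the
            # generation between our read and our write; loop and refresh.
        raise RuntimeError("generation conflict persisted for %s" % rp_uuid)
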
[ 2049.820639] env[62875]: DEBUG nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2049.821405] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246ed329-3116-419e-9c3f-ced3d5d20993 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.838920] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2049.948987] env[62875]: DEBUG nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2049.973091] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2049.973346] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2049.973468] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2049.973643] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2049.973791] 
env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2049.973939] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2049.974166] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2049.974383] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2049.974490] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2049.974671] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2049.974871] env[62875]: DEBUG nova.virt.hardware [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2049.975771] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5883b8-8853-4a9c-8f63-4c2f8acd3eb5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.986145] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f10743-c2b1-4bd9-861e-e88249e26170 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.037302] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180034, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.106349] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b216ca88-eda7-4292-92fa-f766408a9d97 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.114833] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad32293-bdfa-4f98-9158-4a5854143d51 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.145864] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f8a6c1-0550-4971-ad64-7ebcd9e43bd0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.153119] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d04e61e-8a98-406f-8840-0a583a8d7b3a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.166178] env[62875]: DEBUG nova.compute.provider_tree [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2050.260047] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180035, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.336740] env[62875]: INFO nova.compute.manager [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Took 33.66 seconds to build instance. 
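[annotation] The nova.virt.hardware lines above show the CPU topology search for the m1.nano flavor: with no flavor or image constraints, the limits default to 65536 sockets/cores/threads and the preference to 0:0:0, so the driver enumerates every factorization of the vCPU count, and for 1 vCPU the only candidate is sockets=1, cores=1, threads=1. The toy enumeration below illustrates the same idea only; the real logic, with preference-based sorting layered on top, is in nova/virt/hardware.py.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every (sockets, cores, threads) triple whose product equals
        # the vCPU count and which fits inside the logged limits.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching "Got 1 possible topologies"
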
[ 2050.439439] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.439821] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.539524] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180034, 'name': ReconfigVM_Task, 'duration_secs': 0.86612} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.539796] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 37ae8e69-f953-4846-8a21-fed697ea575a/37ae8e69-f953-4846-8a21-fed697ea575a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2050.540519] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39d282b3-8475-4997-9f18-c17ca63432ee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.546957] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2050.546957] env[62875]: value = "task-2180036" [ 2050.546957] env[62875]: _type = "Task" [ 2050.546957] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.554724] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180036, 'name': Rename_Task} progress is 5%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.690883] env[62875]: ERROR nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [req-9c24f4f7-1c43-4f91-b046-21c49d32d386] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9c24f4f7-1c43-4f91-b046-21c49d32d386"}]} [ 2050.708821] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2050.724188] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2050.724441] env[62875]: DEBUG nova.compute.provider_tree [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2050.736442] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2050.757718] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e 
tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2050.763314] env[62875]: DEBUG oslo_vmware.api [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180035, 'name': PowerOnVM_Task, 'duration_secs': 0.928222} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.763565] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2050.763791] env[62875]: INFO nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Took 9.98 seconds to spawn the instance on the hypervisor. [ 2050.763986] env[62875]: DEBUG nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2050.764786] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9925d515-0eab-45ef-9536-1245431f2dd7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.772861] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "a19f5bee-ece8-4aa3-8c33-9474da385238" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.773188] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "a19f5bee-ece8-4aa3-8c33-9474da385238" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.839413] env[62875]: DEBUG oslo_concurrency.lockutils [None req-981db9ae-21a0-425c-94f4-4f5d0c6a7f28 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 122.793s {{(pid=62875) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.027844] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6356d13-a519-488e-bce9-cd15df6ca77d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.035965] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c959b4-33c3-4dbd-9f2b-1483f7114385 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.075722] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261415f6-7cbd-49c3-b78d-614d53b3a810 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.084706] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180036, 'name': Rename_Task, 'duration_secs': 0.228237} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.087861] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2051.088343] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abe4ef28-fc86-486f-b75e-70ea9beef31a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.090826] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c3742b-e779-48af-813a-1f8483bcbda7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.105203] env[62875]: DEBUG nova.compute.provider_tree [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2051.107561] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2051.107561] env[62875]: value = "task-2180037" [ 2051.107561] env[62875]: _type = "Task" [ 2051.107561] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.178753] env[62875]: DEBUG nova.compute.manager [req-3f89ba9b-e14b-4d8c-981e-fc0548757045 req-30c3a393-5d0f-4bbe-94e3-d3f1652df40c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Received event network-vif-plugged-45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2051.178980] env[62875]: DEBUG oslo_concurrency.lockutils [req-3f89ba9b-e14b-4d8c-981e-fc0548757045 req-30c3a393-5d0f-4bbe-94e3-d3f1652df40c service nova] Acquiring lock "d0c4095f-2d78-4055-b568-7e70e7c4c182-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.179214] env[62875]: DEBUG oslo_concurrency.lockutils [req-3f89ba9b-e14b-4d8c-981e-fc0548757045 req-30c3a393-5d0f-4bbe-94e3-d3f1652df40c service nova] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.179384] env[62875]: DEBUG oslo_concurrency.lockutils [req-3f89ba9b-e14b-4d8c-981e-fc0548757045 req-30c3a393-5d0f-4bbe-94e3-d3f1652df40c service nova] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.179588] env[62875]: DEBUG nova.compute.manager [req-3f89ba9b-e14b-4d8c-981e-fc0548757045 req-30c3a393-5d0f-4bbe-94e3-d3f1652df40c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] No waiting events found dispatching network-vif-plugged-45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2051.179757] env[62875]: WARNING nova.compute.manager [req-3f89ba9b-e14b-4d8c-981e-fc0548757045 req-30c3a393-5d0f-4bbe-94e3-d3f1652df40c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Received unexpected event network-vif-plugged-45fd138d-48ba-4dbc-b40b-e424777fed62 for instance with vm_state building and task_state spawning. [ 2051.266891] env[62875]: DEBUG nova.network.neutron [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Successfully updated port: 45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2051.284288] env[62875]: INFO nova.compute.manager [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Took 29.39 seconds to build instance. [ 2051.344812] env[62875]: DEBUG nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Starting instance...
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2051.612347] env[62875]: DEBUG nova.scheduler.client.report [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2051.622376] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180037, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.632296] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "c1e107cd-5c03-405f-bdae-3281dc4844d5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.770040] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2051.770040] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2051.770386] env[62875]: DEBUG nova.network.neutron [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2051.787403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bfaace1f-cd13-417c-8a5c-f5897b986422 tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "c1e107cd-5c03-405f-bdae-3281dc4844d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 117.880s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.788720] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock
"c1e107cd-5c03-405f-bdae-3281dc4844d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.157s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.789608] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "c1e107cd-5c03-405f-bdae-3281dc4844d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.789859] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "c1e107cd-5c03-405f-bdae-3281dc4844d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.790052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "c1e107cd-5c03-405f-bdae-3281dc4844d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.791955] env[62875]: INFO nova.compute.manager [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Terminating instance [ 2051.867577] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.936062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "305aebbe-f983-4826-b8c0-9854458f7d48" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.936522] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.118347] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources"
"released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.198s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.118856] env[62875]: DEBUG nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2052.121417] env[62875]: DEBUG oslo_vmware.api [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180037, 'name': PowerOnVM_Task, 'duration_secs': 1.012089} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.121689] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.109s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.123067] env[62875]: INFO nova.compute.claims [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2052.125568] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2052.125767] env[62875]: INFO nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Took 8.99 seconds to spawn the instance on the hypervisor. [ 2052.125933] env[62875]: DEBUG nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2052.127013] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b45fe1-f2a2-4730-b903-48d88ae1ce13 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.294723] env[62875]: DEBUG nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2052.299141] env[62875]: DEBUG nova.compute.manager [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2052.299366] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2052.300287] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e835f67a-fddf-4fee-8129-527faa7ab062 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.308256] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2052.308496] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ff66233-0086-4a5a-8cbe-06b505e35816 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.315298] env[62875]: DEBUG oslo_vmware.api [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2052.315298] env[62875]: value = "task-2180038" [ 2052.315298] env[62875]: _type = "Task" [ 2052.315298] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.316064] env[62875]: DEBUG nova.network.neutron [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2052.326562] env[62875]: DEBUG oslo_vmware.api [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180038, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.443277] env[62875]: DEBUG nova.compute.utils [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2052.509749] env[62875]: DEBUG nova.network.neutron [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updating instance_info_cache with network_info: [{"id": "45fd138d-48ba-4dbc-b40b-e424777fed62", "address": "fa:16:3e:bb:5c:21", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45fd138d-48", "ovs_interfaceid": "45fd138d-48ba-4dbc-b40b-e424777fed62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2052.628854] env[62875]: DEBUG nova.compute.utils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2052.633145] env[62875]: DEBUG nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2052.633503] env[62875]: DEBUG nova.network.neutron [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2052.643731] env[62875]: INFO nova.compute.manager [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Took 30.05 seconds to build instance. 
[ 2052.697901] env[62875]: DEBUG nova.policy [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e818b6d10af42bb9c86e79ae93de507', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7523e34b88d84ec1ae28221d8d1a3591', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2052.827996] env[62875]: DEBUG oslo_vmware.api [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180038, 'name': PowerOffVM_Task, 'duration_secs': 0.190658} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.828992] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.829393] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2052.829622] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2052.829856] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d05edb9-fc7a-4d15-b667-8fa712735392 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.906997] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2052.906997] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2052.907230] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 
tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Deleting the datastore file [datastore2] c1e107cd-5c03-405f-bdae-3281dc4844d5 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2052.907495] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-903cd9f5-0685-43b5-9576-070cab200f84 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.914205] env[62875]: DEBUG oslo_vmware.api [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for the task: (returnval){ [ 2052.914205] env[62875]: value = "task-2180040" [ 2052.914205] env[62875]: _type = "Task" [ 2052.914205] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.922517] env[62875]: DEBUG oslo_vmware.api [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180040, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.949898] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.013s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.012610] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Releasing lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2053.013056] env[62875]: DEBUG nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Instance network_info: |[{"id": "45fd138d-48ba-4dbc-b40b-e424777fed62", "address": "fa:16:3e:bb:5c:21", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45fd138d-48", "ovs_interfaceid": "45fd138d-48ba-4dbc-b40b-e424777fed62", "qbh_params": null, "qbg_params": null, "active": true,
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2053.013590] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:5c:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a64108f9-df0a-4feb-bbb5-97f5841c356c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45fd138d-48ba-4dbc-b40b-e424777fed62', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2053.021636] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Creating folder: Project (770eddfb80d943c7a34a3d9a60845079). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2053.022036] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4e224eb-6309-4913-88b0-6fdffe70b47b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.033491] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Created folder: Project (770eddfb80d943c7a34a3d9a60845079) in parent group-v444854. [ 2053.033719] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Creating folder: Instances. Parent ref: group-v444888. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2053.034061] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4db06ba-ef6d-4197-b3b1-4a088b969f66 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.043458] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Created folder: Instances in parent group-v444888. [ 2053.043664] env[62875]: DEBUG oslo.service.loopingcall [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2053.044342] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2053.044342] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a811fe50-44ec-4f16-a9b6-a731af82fa99 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.059562] env[62875]: DEBUG nova.network.neutron [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Successfully created port: 78c0725e-6d52-4486-acdf-e95c7a1ae020 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2053.065970] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2053.065970] env[62875]: value = "task-2180043" [ 2053.065970] env[62875]: _type = "Task" [ 2053.065970] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.073524] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180043, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.133545] env[62875]: DEBUG nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2053.148289] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5a096e84-29f7-4463-bdda-e9ed903d4625 tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "37ae8e69-f953-4846-8a21-fed697ea575a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 117.167s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.391738] env[62875]: DEBUG nova.compute.manager [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Received event network-changed-45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2053.391953] env[62875]: DEBUG nova.compute.manager [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Refreshing instance network info cache due to event network-changed-45fd138d-48ba-4dbc-b40b-e424777fed62.
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2053.392208] env[62875]: DEBUG oslo_concurrency.lockutils [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] Acquiring lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.392354] env[62875]: DEBUG oslo_concurrency.lockutils [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] Acquired lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.392517] env[62875]: DEBUG nova.network.neutron [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Refreshing network info cache for port 45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2053.420501] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899ae3ee-ab20-4d17-b29e-0c4f453979d0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.428317] env[62875]: DEBUG oslo_vmware.api [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Task: {'id': task-2180040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130425} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.430112] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2053.430319] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2053.430519] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2053.430726] env[62875]: INFO nova.compute.manager [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 2053.430971] env[62875]: DEBUG oslo.service.loopingcall [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2053.431228] env[62875]: DEBUG nova.compute.manager [-] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2053.431328] env[62875]: DEBUG nova.network.neutron [-] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2053.433670] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbe9ccf-692c-40c9-bcfe-ddc1925cbc20 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.472987] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25480ac4-790f-4bc2-8e73-7a01fb9de95d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.480693] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d7cec1-29e5-462e-b50d-d10b6c5e8420 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.494079] env[62875]: DEBUG nova.compute.provider_tree [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2053.575686] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180043, 'name': CreateVM_Task, 'duration_secs': 0.351525} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.575856] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2053.576548] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.576713] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.577246] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2053.577584] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c29eab1-63d2-4a16-896c-957e1a024f62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.581836] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2053.581836] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ed6b8a-cebf-1429-de04-5b97b75545b1" [ 2053.581836] env[62875]: _type = "Task" [ 2053.581836] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.589976] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ed6b8a-cebf-1429-de04-5b97b75545b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.655833] env[62875]: DEBUG nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2053.777182] env[62875]: DEBUG nova.compute.manager [req-94335600-cc37-404d-aef8-b8233c145e38 req-e0994630-f051-499b-a7d4-1ff35223d72c service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Received event network-vif-deleted-b429f7a6-ae5e-41e1-9212-82b051f2e35b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2053.777408] env[62875]: INFO nova.compute.manager [req-94335600-cc37-404d-aef8-b8233c145e38 req-e0994630-f051-499b-a7d4-1ff35223d72c service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Neutron deleted interface b429f7a6-ae5e-41e1-9212-82b051f2e35b; detaching it from the instance and deleting it from the info cache [ 2053.777678] env[62875]: DEBUG nova.network.neutron [req-94335600-cc37-404d-aef8-b8233c145e38 req-e0994630-f051-499b-a7d4-1ff35223d72c service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2053.997680] env[62875]: DEBUG nova.scheduler.client.report [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2054.023979] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "305aebbe-f983-4826-b8c0-9854458f7d48" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.024317] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.024576] env[62875]: INFO nova.compute.manager [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Attaching volume e2465556-b4a9-4f12-9fd7-73c7f79b83f6 to /dev/sdb [ 2054.059929] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c09ad9-1675-4f49-bfb0-93001b013a14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.067361] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754bdb88-85e3-4da0-a85c-cbf5b5a44228 {{(pid=62875) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.081525] env[62875]: DEBUG nova.virt.block_device [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Updating existing volume attachment record: 5ec37211-ae1b-45fa-b733-a7c72390b137 {{(pid=62875) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2054.094691] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ed6b8a-cebf-1429-de04-5b97b75545b1, 'name': SearchDatastore_Task, 'duration_secs': 0.00908} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.094998] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.095265] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2054.095500] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.095650] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2054.095824] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2054.096098] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4256e572-97e7-4a00-bdac-3cf98226d76c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.109611] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2054.109848] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2054.110668] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64e3add0-c94e-4e0b-bcf3-7989a1afa5ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.116122] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2054.116122] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f10890-7b24-f1a6-3b47-5f8ea9b46fa6" [ 2054.116122] env[62875]: _type = "Task" [ 2054.116122] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.124096] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f10890-7b24-f1a6-3b47-5f8ea9b46fa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.124822] env[62875]: DEBUG nova.network.neutron [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updated VIF entry in instance network info cache for port 45fd138d-48ba-4dbc-b40b-e424777fed62. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2054.125165] env[62875]: DEBUG nova.network.neutron [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updating instance_info_cache with network_info: [{"id": "45fd138d-48ba-4dbc-b40b-e424777fed62", "address": "fa:16:3e:bb:5c:21", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45fd138d-48", "ovs_interfaceid": "45fd138d-48ba-4dbc-b40b-e424777fed62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.145436] env[62875]: DEBUG nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2054.174147] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2054.174544] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2054.174796] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2054.175110] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2054.175416] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2054.175681] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2054.176035] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2054.176287] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2054.176548] 
env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2054.176800] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2054.177059] env[62875]: DEBUG nova.virt.hardware [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2054.178361] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca022685-a93f-4daf-9936-9f3b9b15c512 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.185171] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.193026] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd8b919-ac82-4270-b76d-41d1fc6f697e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.260361] env[62875]: DEBUG nova.network.neutron [-] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.281065] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2f433f4-e869-4324-857b-718e0cc5b2ac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.292940] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7f0976-a811-4621-99cc-8b806d677ea1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.318345] env[62875]: DEBUG nova.compute.manager [req-94335600-cc37-404d-aef8-b8233c145e38 req-e0994630-f051-499b-a7d4-1ff35223d72c service nova] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Detach interface failed, port_id=b429f7a6-ae5e-41e1-9212-82b051f2e35b, reason: Instance c1e107cd-5c03-405f-bdae-3281dc4844d5 could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2054.442264] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "37ae8e69-f953-4846-8a21-fed697ea575a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.442561] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "37ae8e69-f953-4846-8a21-fed697ea575a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.442780] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "37ae8e69-f953-4846-8a21-fed697ea575a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.442959] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "37ae8e69-f953-4846-8a21-fed697ea575a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.443145] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "37ae8e69-f953-4846-8a21-fed697ea575a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.448085] env[62875]: INFO nova.compute.manager [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Terminating instance [ 2054.502920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.504352] env[62875]: DEBUG nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Start building networks asynchronously for instance.
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2054.506884] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.581s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.508830] env[62875]: INFO nova.compute.claims [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2054.628578] env[62875]: DEBUG oslo_concurrency.lockutils [req-538e8518-43f7-45d2-8eb4-381db5f99624 req-076b3076-f69f-4d26-92fc-96078434ba9c service nova] Releasing lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.629016] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f10890-7b24-f1a6-3b47-5f8ea9b46fa6, 'name': SearchDatastore_Task, 'duration_secs': 0.031083} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.629886] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e26917e-0dce-415a-9e1b-369c47eeffe4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.636962] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2054.636962] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dad0fc-b0bd-2068-26f2-9507ceec0640" [ 2054.636962] env[62875]: _type = "Task" [ 2054.636962] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.647792] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dad0fc-b0bd-2068-26f2-9507ceec0640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.726306] env[62875]: DEBUG nova.network.neutron [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Successfully updated port: 78c0725e-6d52-4486-acdf-e95c7a1ae020 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2054.764705] env[62875]: INFO nova.compute.manager [-] [instance: c1e107cd-5c03-405f-bdae-3281dc4844d5] Took 1.33 seconds to deallocate network for instance. 
[ 2054.955931] env[62875]: DEBUG nova.compute.manager [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2054.956202] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2054.957142] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3680fb30-40ba-40d8-8453-39651edb847e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.965940] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2054.966222] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-334f6bee-ea65-44ad-baf4-e6ca65db8183 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.972253] env[62875]: DEBUG oslo_vmware.api [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2054.972253] env[62875]: value = "task-2180047" [ 2054.972253] env[62875]: _type = "Task" [ 2054.972253] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.981150] env[62875]: DEBUG oslo_vmware.api [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.015792] env[62875]: DEBUG nova.compute.utils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2055.017536] env[62875]: DEBUG nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2055.017715] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2055.107924] env[62875]: DEBUG nova.policy [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae8dd9ca6c0b462aa3db7bcfae81422b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63be470870764b6ab6e803cc2a345f24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2055.147699] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dad0fc-b0bd-2068-26f2-9507ceec0640, 'name': SearchDatastore_Task, 'duration_secs': 0.011636} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.148124] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2055.148299] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d0c4095f-2d78-4055-b568-7e70e7c4c182/d0c4095f-2d78-4055-b568-7e70e7c4c182.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2055.148604] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97858bf9-5a04-4afe-9b55-9a5c01d54bbb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.156083] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2055.156083] env[62875]: value = "task-2180048" [ 2055.156083] env[62875]: _type = "Task" [ 2055.156083] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.165046] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.228964] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-8361611a-ad16-43ef-94e0-f2e7e9851682" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2055.229177] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-8361611a-ad16-43ef-94e0-f2e7e9851682" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2055.229298] env[62875]: DEBUG nova.network.neutron [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2055.271625] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.421248] env[62875]: DEBUG nova.compute.manager [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Received event network-vif-plugged-78c0725e-6d52-4486-acdf-e95c7a1ae020 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2055.421852] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] Acquiring lock "8361611a-ad16-43ef-94e0-f2e7e9851682-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.422159] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.422351] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.422526] env[62875]: DEBUG nova.compute.manager [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] No waiting events found dispatching network-vif-plugged-78c0725e-6d52-4486-acdf-e95c7a1ae020 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2055.422700] env[62875]: WARNING nova.compute.manager [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Received unexpected event network-vif-plugged-78c0725e-6d52-4486-acdf-e95c7a1ae020 for instance with vm_state building and task_state spawning. [ 2055.422862] env[62875]: DEBUG nova.compute.manager [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Received event network-changed-78c0725e-6d52-4486-acdf-e95c7a1ae020 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2055.423025] env[62875]: DEBUG nova.compute.manager [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Refreshing instance network info cache due to event network-changed-78c0725e-6d52-4486-acdf-e95c7a1ae020. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2055.423209] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] Acquiring lock "refresh_cache-8361611a-ad16-43ef-94e0-f2e7e9851682" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2055.483752] env[62875]: DEBUG oslo_vmware.api [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180047, 'name': PowerOffVM_Task, 'duration_secs': 0.207123} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.483948] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2055.484137] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2055.484391] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88e6a105-9ca6-471b-8b56-4f4a66c20fed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.522209] env[62875]: DEBUG nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2055.578866] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2055.578997] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2055.579154] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Deleting the datastore file [datastore2] 37ae8e69-f953-4846-8a21-fed697ea575a {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2055.579407] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63fd07af-971f-4a6b-b079-4decadbd0f7c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.592852] env[62875]: DEBUG oslo_vmware.api [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for the task: (returnval){ [ 2055.592852] env[62875]: value = "task-2180050" [ 2055.592852] env[62875]: _type = "Task" [ 2055.592852] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.606427] env[62875]: DEBUG oslo_vmware.api [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180050, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.636110] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Successfully created port: 3f89ecea-c779-49ec-8304-34ae86acec1f {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2055.672500] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180048, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.790784] env[62875]: DEBUG nova.network.neutron [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2055.885131] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7204f176-43d1-4d1b-8216-88d472e89d6b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.891998] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5497c5d6-60f6-4b08-8d45-83ce53654f8a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.927919] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8b1a84-3b42-4a9c-98dd-c64e4aefbc99 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.936594] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889cd77b-74d3-4f22-9612-a6ed37961c18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.951782] env[62875]: DEBUG nova.compute.provider_tree [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2055.970755] env[62875]: DEBUG nova.network.neutron [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Updating instance_info_cache with network_info: [{"id": "78c0725e-6d52-4486-acdf-e95c7a1ae020", "address": "fa:16:3e:9b:37:3d", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", 
"label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c0725e-6d", "ovs_interfaceid": "78c0725e-6d52-4486-acdf-e95c7a1ae020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2056.062773] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Successfully created port: cf16fb35-456a-450b-8a75-bac02d08f481 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2056.106300] env[62875]: DEBUG oslo_vmware.api [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Task: {'id': task-2180050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432135} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.106571] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2056.106758] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2056.106937] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2056.107123] env[62875]: INFO nova.compute.manager [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 2056.107361] env[62875]: DEBUG oslo.service.loopingcall [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2056.107601] env[62875]: DEBUG nova.compute.manager [-] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2056.107696] env[62875]: DEBUG nova.network.neutron [-] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2056.167741] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.81693} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.172582] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d0c4095f-2d78-4055-b568-7e70e7c4c182/d0c4095f-2d78-4055-b568-7e70e7c4c182.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2056.172582] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2056.172582] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc5d4624-b46e-4363-8287-2a632a323b44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.179166] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2056.179166] env[62875]: value = "task-2180051" [ 2056.179166] env[62875]: _type = "Task" [ 2056.179166] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.190080] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180051, 'name': ExtendVirtualDisk_Task} progress is 0%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.390304] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Successfully created port: 910a7aba-2677-4608-af5a-5efee055c3aa {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2056.402670] env[62875]: DEBUG nova.compute.manager [req-ec5afa85-b093-4948-8b92-6ee5f1a7e4d2 req-55b0049d-1b6f-4857-8666-36952d9973cf service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Received event network-vif-deleted-1e776d51-5ada-4779-87c3-320b11662f1d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2056.402876] env[62875]: INFO nova.compute.manager [req-ec5afa85-b093-4948-8b92-6ee5f1a7e4d2 req-55b0049d-1b6f-4857-8666-36952d9973cf service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Neutron deleted interface 1e776d51-5ada-4779-87c3-320b11662f1d; detaching it from the instance and deleting it from the info cache [ 2056.403066] env[62875]: DEBUG nova.network.neutron [req-ec5afa85-b093-4948-8b92-6ee5f1a7e4d2 req-55b0049d-1b6f-4857-8666-36952d9973cf service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2056.454996] env[62875]: DEBUG nova.scheduler.client.report [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2056.473218] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-8361611a-ad16-43ef-94e0-f2e7e9851682" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.473579] env[62875]: DEBUG nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Instance network_info: |[{"id": "78c0725e-6d52-4486-acdf-e95c7a1ae020", "address": "fa:16:3e:9b:37:3d", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c0725e-6d", "ovs_interfaceid": "78c0725e-6d52-4486-acdf-e95c7a1ae020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2056.473868] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] Acquired lock "refresh_cache-8361611a-ad16-43ef-94e0-f2e7e9851682" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2056.474083] env[62875]: DEBUG nova.network.neutron [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Refreshing network info cache for port 78c0725e-6d52-4486-acdf-e95c7a1ae020 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2056.475140] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:37:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78c0725e-6d52-4486-acdf-e95c7a1ae020', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2056.483692] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating folder: Project (7523e34b88d84ec1ae28221d8d1a3591). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2056.484468] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72eb538b-fffd-49a8-8e40-ce7b8e0a8122 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.495082] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created folder: Project (7523e34b88d84ec1ae28221d8d1a3591) in parent group-v444854. [ 2056.495300] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating folder: Instances. Parent ref: group-v444893. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2056.495533] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76aa9b9a-6c3b-41b6-a515-f3df3ed5d049 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.503662] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created folder: Instances in parent group-v444893. [ 2056.503880] env[62875]: DEBUG oslo.service.loopingcall [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2056.504067] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2056.504342] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3007d360-7647-4a52-9afd-18c5632c2823 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.523077] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2056.523077] env[62875]: value = "task-2180055" [ 2056.523077] env[62875]: _type = "Task" [ 2056.523077] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.532033] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180055, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.532704] env[62875]: DEBUG nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2056.556999] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2056.557267] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2056.557455] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2056.557613] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2056.557759] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2056.557906] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2056.558138] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2056.558296] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2056.558463] env[62875]: DEBUG nova.virt.hardware [None 
req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2056.558628] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2056.558804] env[62875]: DEBUG nova.virt.hardware [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2056.559692] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f56f54-c36c-4326-84ab-40e90a49e937 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.568760] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2759cdda-f668-4a65-ac0d-e4425d43b65d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.687410] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064033} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.687754] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2056.688616] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce889ce-2892-4ef3-a4e6-a53120255d13 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.712963] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] d0c4095f-2d78-4055-b568-7e70e7c4c182/d0c4095f-2d78-4055-b568-7e70e7c4c182.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2056.712963] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-549b81d3-1b16-4661-aaa9-a7e46f1fabe4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.733294] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2056.733294] env[62875]: value = "task-2180056" [ 2056.733294] env[62875]: _type = "Task" [ 2056.733294] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.741692] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180056, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.866857] env[62875]: DEBUG nova.network.neutron [-] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2056.906527] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e3f5bad-2902-4479-8b41-f9af7f4ff465 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.916485] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cf8ba3-cf01-4879-bd80-b5a366a8eaa7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.942300] env[62875]: DEBUG nova.compute.manager [req-ec5afa85-b093-4948-8b92-6ee5f1a7e4d2 req-55b0049d-1b6f-4857-8666-36952d9973cf service nova] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Detach interface failed, port_id=1e776d51-5ada-4779-87c3-320b11662f1d, reason: Instance 37ae8e69-f953-4846-8a21-fed697ea575a could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2056.961277] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.961775] env[62875]: DEBUG nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2056.964429] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.911s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2056.966794] env[62875]: INFO nova.compute.claims [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2057.034043] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180055, 'name': CreateVM_Task, 'duration_secs': 0.500616} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.034175] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2057.034849] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.035044] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.035353] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2057.035650] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef46fb06-19c8-49c8-a4ec-4579537d6ccc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.040391] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2057.040391] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5287c3f1-2984-b28a-c502-6779cd260772" [ 2057.040391] env[62875]: _type = "Task" [ 2057.040391] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.050195] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5287c3f1-2984-b28a-c502-6779cd260772, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.184497] env[62875]: DEBUG nova.network.neutron [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Updated VIF entry in instance network info cache for port 78c0725e-6d52-4486-acdf-e95c7a1ae020. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2057.184895] env[62875]: DEBUG nova.network.neutron [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Updating instance_info_cache with network_info: [{"id": "78c0725e-6d52-4486-acdf-e95c7a1ae020", "address": "fa:16:3e:9b:37:3d", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c0725e-6d", "ovs_interfaceid": "78c0725e-6d52-4486-acdf-e95c7a1ae020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2057.243546] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180056, 'name': ReconfigVM_Task, 'duration_secs': 0.34684} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.243909] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Reconfigured VM instance instance-0000002f to attach disk [datastore2] d0c4095f-2d78-4055-b568-7e70e7c4c182/d0c4095f-2d78-4055-b568-7e70e7c4c182.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2057.244691] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-430820f0-0ecd-4ab6-99b8-b66e60ab6095 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.251375] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2057.251375] env[62875]: value = "task-2180057" [ 2057.251375] env[62875]: _type = "Task" [ 2057.251375] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.260712] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180057, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.370199] env[62875]: INFO nova.compute.manager [-] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Took 1.26 seconds to deallocate network for instance. [ 2057.474344] env[62875]: DEBUG nova.compute.utils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2057.476895] env[62875]: DEBUG nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2057.477504] env[62875]: DEBUG nova.network.neutron [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2057.520052] env[62875]: DEBUG nova.policy [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d0e175791341aea0db00ef8a1b5680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '226340868e7446cca12688a32d13c630', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2057.551329] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5287c3f1-2984-b28a-c502-6779cd260772, 'name': SearchDatastore_Task, 'duration_secs': 0.009947} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.551670] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.551915] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2057.552188] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.552335] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.552532] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2057.552819] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c7edd3b-26b2-4b10-a17e-a837e6bf0b3d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.562472] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2057.562659] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2057.563493] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26784749-1dda-40bf-b1b4-e1ef058276f4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.569597] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2057.569597] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526edd1c-b005-4d9e-baac-02e0556846fc" [ 2057.569597] env[62875]: _type = "Task" [ 2057.569597] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.577795] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526edd1c-b005-4d9e-baac-02e0556846fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.688262] env[62875]: DEBUG oslo_concurrency.lockutils [req-f0b37704-040c-4839-bbf0-f4ec5cf917b2 req-e1d45d87-b0e9-4144-b272-fe4ea0e43d0e service nova] Releasing lock "refresh_cache-8361611a-ad16-43ef-94e0-f2e7e9851682" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.761801] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180057, 'name': Rename_Task, 'duration_secs': 0.1427} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.762140] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2057.762416] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3ba52cd-68b3-4785-a935-b2fdc4efe6e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.768545] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2057.768545] env[62875]: value = "task-2180058" [ 2057.768545] env[62875]: _type = "Task" [ 2057.768545] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.779049] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180058, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.810015] env[62875]: DEBUG nova.network.neutron [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Successfully created port: 19bc9bc7-4c85-4add-a788-b2b0b2376185 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2057.877675] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.982329] env[62875]: DEBUG nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2058.082615] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526edd1c-b005-4d9e-baac-02e0556846fc, 'name': SearchDatastore_Task, 'duration_secs': 0.008889} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.084289] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbae5882-bab2-4b73-80fe-c007c5391796 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.094930] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2058.094930] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523e8565-e865-c24e-e4e9-a396eeb2bc88" [ 2058.094930] env[62875]: _type = "Task" [ 2058.094930] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.104825] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523e8565-e865-c24e-e4e9-a396eeb2bc88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.232570] env[62875]: DEBUG nova.compute.manager [req-46def67d-0a98-4191-9c6a-9ce5acb16bc7 req-b8c24f0d-ac25-465a-9164-8954248eb12a service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-vif-plugged-3f89ecea-c779-49ec-8304-34ae86acec1f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2058.232845] env[62875]: DEBUG oslo_concurrency.lockutils [req-46def67d-0a98-4191-9c6a-9ce5acb16bc7 req-b8c24f0d-ac25-465a-9164-8954248eb12a service nova] Acquiring lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.232995] env[62875]: DEBUG oslo_concurrency.lockutils [req-46def67d-0a98-4191-9c6a-9ce5acb16bc7 req-b8c24f0d-ac25-465a-9164-8954248eb12a service nova] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.233190] env[62875]: DEBUG oslo_concurrency.lockutils [req-46def67d-0a98-4191-9c6a-9ce5acb16bc7 req-b8c24f0d-ac25-465a-9164-8954248eb12a service nova] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.233409] env[62875]: DEBUG nova.compute.manager [req-46def67d-0a98-4191-9c6a-9ce5acb16bc7 req-b8c24f0d-ac25-465a-9164-8954248eb12a service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] No waiting events found dispatching network-vif-plugged-3f89ecea-c779-49ec-8304-34ae86acec1f {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2058.233547] env[62875]: WARNING nova.compute.manager [req-46def67d-0a98-4191-9c6a-9ce5acb16bc7 req-b8c24f0d-ac25-465a-9164-8954248eb12a service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received unexpected event network-vif-plugged-3f89ecea-c779-49ec-8304-34ae86acec1f for instance with vm_state building and task_state spawning. [ 2058.246420] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Successfully updated port: 3f89ecea-c779-49ec-8304-34ae86acec1f {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2058.283702] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180058, 'name': PowerOnVM_Task} progress is 66%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.286562] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0987d0b9-c614-483b-a5da-ac10d91f2194 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.293463] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567d2ce8-373d-4f41-9110-8238b194ff08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.323973] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710b9e64-c6de-4609-a0dd-322ca4456f53 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.331685] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5dd595-b2fb-495d-b7ae-95dbf1df04e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.344968] env[62875]: DEBUG nova.compute.provider_tree [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2058.605108] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523e8565-e865-c24e-e4e9-a396eeb2bc88, 'name': SearchDatastore_Task, 'duration_secs': 0.008805} completed successfully. 
[ 2058.605381] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.605649] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2058.605900] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e51172d4-ef8c-4a36-80e9-99d4b3f35e01 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.613475] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2058.613475] env[62875]: value = "task-2180059" [ 2058.613475] env[62875]: _type = "Task" [ 2058.613475] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.621643] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.643629] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Volume attach.
Driver type: vmdk {{(pid=62875) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2058.643932] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-444892', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'name': 'volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '305aebbe-f983-4826-b8c0-9854458f7d48', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'serial': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2058.644912] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d24125-8079-4f37-aa38-343a9147f583 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.671982] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb38f680-4aa0-434e-bc96-bc9db3400e2a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.697211] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6/volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2058.697607] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b6f28a2-c76a-462a-a987-099b0bc42a14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.716633] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2058.716633] env[62875]: value = "task-2180060" [ 2058.716633] env[62875]: _type = "Task" [ 2058.716633] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.724723] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180060, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.780392] env[62875]: DEBUG oslo_vmware.api [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180058, 'name': PowerOnVM_Task, 'duration_secs': 0.648921} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
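`_attach_volume_vmdk` receives the Cinder connection_info dict shown above; the fields the driver actually acts on are the volume's backing reference (`data['volume']`, which appears to be the moref of the volume's shadow VM), the volume UUID, and the access mode. A small sketch of pulling those out, with the dict literal trimmed from the log record:

```python
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-444892',
        'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6',
        'name': 'volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6',
        'access_mode': 'rw',
        'encrypted': False,
    },
    'serial': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6',
}

def describe_vmdk_attachment(ci):
    # Backing reference, volume UUID, and whether the attach must be writable.
    data = ci['data']
    return data['volume'], data['volume_id'], data['access_mode'] == 'rw'

assert describe_vmdk_attachment(connection_info) == (
    'vm-444892', 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', True)
```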
[ 2058.780674] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2058.780917] env[62875]: INFO nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Took 8.83 seconds to spawn the instance on the hypervisor. [ 2058.781162] env[62875]: DEBUG nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2058.782087] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4068de-ee01-4d14-a7b3-7d7f148a68f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.848071] env[62875]: DEBUG nova.scheduler.client.report [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2058.993654] env[62875]: DEBUG nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
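The inventory dict above is what the resource tracker reports to Placement; usable capacity per resource class is `(total - reserved) * allocation_ratio`, so this node can overcommit to 192 VCPUs while memory and disk stay at 1:1. A quick check against the logged numbers:

```python
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Placement's effective-capacity formula per resource class.
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```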
[ 2059.037658] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2059.038587] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2059.038877] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2059.039179] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2059.039435] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2059.039703] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2059.040033] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2059.040321] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2059.040597] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321
tempest-ServersTestJSON-1595867321-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2059.040902] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2059.043022] env[62875]: DEBUG nova.virt.hardware [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2059.043022] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4a450d-e49f-48d3-87b3-bc4ab130db2a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.050914] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2494a3-9951-4a38-b99e-4e43e6a1052d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.123863] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497282} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.124476] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2059.124792] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2059.125524] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83a5482f-84e6-4678-a4e7-c5c75b2dc481 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.132120] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2059.132120] env[62875]: value = "task-2180061" [ 2059.132120] env[62875]: _type = "Task" [ 2059.132120] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
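The `nova.virt.hardware` records above walk the topology search: with no flavor or image constraints (limits 0:0:0, maximum 65536 each), a 1-vCPU guest yields exactly one topology, 1:1:1. A sketch of that enumeration under the same defaults (the real `_get_possible_cpu_topologies` applies more ordering rules; this only reproduces the counting):

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) whose product equals the vCPU
    count, within the given limits."""
    found = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append((s, c, t))
    return found

print(possible_topologies(1))   # [(1, 1, 1)] -- "Got 1 possible topologies"
```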
[ 2059.140499] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180061, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.225998] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.237611] env[62875]: DEBUG nova.compute.manager [req-189c26bd-8b51-4fdd-a6d4-fcd6f07a371d req-0b15185c-0aff-4f1f-88fc-5b923172d3e8 service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Received event network-vif-plugged-19bc9bc7-4c85-4add-a788-b2b0b2376185 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2059.237862] env[62875]: DEBUG oslo_concurrency.lockutils [req-189c26bd-8b51-4fdd-a6d4-fcd6f07a371d req-0b15185c-0aff-4f1f-88fc-5b923172d3e8 service nova] Acquiring lock "816e0ecb-6476-49bb-9fea-a01067f25b51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2059.238103] env[62875]: DEBUG oslo_concurrency.lockutils [req-189c26bd-8b51-4fdd-a6d4-fcd6f07a371d req-0b15185c-0aff-4f1f-88fc-5b923172d3e8 service nova] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2059.238320] env[62875]: DEBUG oslo_concurrency.lockutils [req-189c26bd-8b51-4fdd-a6d4-fcd6f07a371d req-0b15185c-0aff-4f1f-88fc-5b923172d3e8 service nova] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.238498] env[62875]: DEBUG nova.compute.manager [req-189c26bd-8b51-4fdd-a6d4-fcd6f07a371d req-0b15185c-0aff-4f1f-88fc-5b923172d3e8 service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] No waiting events found dispatching network-vif-plugged-19bc9bc7-4c85-4add-a788-b2b0b2376185 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2059.238675] env[62875]: WARNING nova.compute.manager [req-189c26bd-8b51-4fdd-a6d4-fcd6f07a371d req-0b15185c-0aff-4f1f-88fc-5b923172d3e8 service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Received unexpected event network-vif-plugged-19bc9bc7-4c85-4add-a788-b2b0b2376185 for instance with vm_state building and task_state spawning. [ 2059.300592] env[62875]: INFO nova.compute.manager [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Took 34.86 seconds to build instance.
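Both `network-vif-plugged` warnings in this section come from the same mechanism: the compute manager keeps a per-instance table of events it is prepared to wait on, guarded by the `<uuid>-events` lock, and when Neutron's notification arrives before any waiter has registered, `pop_instance_event` finds nothing and the event is logged as unexpected (harmless while the instance is still spawning). A minimal sketch of that latch, using a threading-based stand-in for Nova's eventlet machinery:

```python
import threading

class InstanceEvents:
    """Sketch of the pop_instance_event pattern: waiters register an
    event first; notifications pop and signal it, or report it as
    unexpected if nobody registered."""
    def __init__(self):
        self._lock = threading.Lock()   # plays the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> Event

    def prepare_for_event(self, instance, name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance, name)] = ev
        return ev                       # caller later ev.wait()s on this

    def pop_instance_event(self, instance, name):
        with self._lock:
            ev = self._waiters.pop((instance, name), None)
        if ev is None:
            print(f"Received unexpected event {name} for instance {instance}")
        else:
            ev.set()                    # wake the spawning thread
        return ev
```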
[ 2059.308903] env[62875]: DEBUG nova.network.neutron [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Successfully updated port: 19bc9bc7-4c85-4add-a788-b2b0b2376185 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2059.354060] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.354186] env[62875]: DEBUG nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2059.356676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.628s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2059.358229] env[62875]: INFO nova.compute.claims [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2059.643683] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066641} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.644286] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2059.644782] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6ddca1-ba93-4edd-a9a4-baafd065755c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.671371] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2059.672154] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea95a8f5-6950-4691-9b5f-97d1af7e9de9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.693400] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2059.693400] env[62875]: value = "task-2180062" [ 2059.693400] env[62875]: _type = "Task" [ 2059.693400] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.701810] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180062, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.726037] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180060, 'name': ReconfigVM_Task, 'duration_secs': 0.816936} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.726551] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Reconfigured VM instance instance-0000002c to attach disk [datastore2] volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6/volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2059.732387] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-605fe757-a262-4af0-baa9-c03ca2db7486 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.749270] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2059.749270] env[62875]: value = "task-2180063" [ 2059.749270] env[62875]: _type = "Task" [ 2059.749270] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.758131] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180063, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.802960] env[62875]: DEBUG oslo_concurrency.lockutils [None req-77e0a953-d996-4b4c-b7f6-d57468af838d tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.668s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.812722] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-816e0ecb-6476-49bb-9fea-a01067f25b51" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.812722] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-816e0ecb-6476-49bb-9fea-a01067f25b51" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.812722] env[62875]: DEBUG nova.network.neutron [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2059.865348] env[62875]: DEBUG nova.compute.utils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Using /dev/sd instead of None {{(pid=62875) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2059.869145] env[62875]: DEBUG nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2059.869265] env[62875]: DEBUG nova.network.neutron [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2059.909247] env[62875]: DEBUG nova.policy [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52e8b57145f04735b46c68ee0f52a031', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5eb2a81ac944f0ca4b81df00d4859e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2060.173902] env[62875]: DEBUG nova.network.neutron [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Successfully created port: c26e9371-7026-4263-9b30-4e9735c336ac {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2060.204889] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180062, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.259701] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.305931] env[62875]: DEBUG nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}}
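The earlier "Using /dev/sd instead of None" record means the caller did not request a device path, so `get_next_device_name` falls back to the `/dev/sd` prefix and picks the first unused letter. A simplified sketch of that suffix walk (the real helper also handles multi-letter suffixes and mixed /dev/vd and /dev/xvd prefixes):

```python
import string

def next_device_name(used, prefix='/dev/sd'):
    """Pick the first free /dev/sdX name, given the names already in use."""
    taken = {name[len(prefix):] for name in used if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in taken:
            return prefix + letter
    raise ValueError('no free device names under ' + prefix)

print(next_device_name(['/dev/sda']))  # /dev/sdb
```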
[ 2060.322208] env[62875]: DEBUG nova.compute.manager [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-changed-3f89ecea-c779-49ec-8304-34ae86acec1f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2060.322400] env[62875]: DEBUG nova.compute.manager [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Refreshing instance network info cache due to event network-changed-3f89ecea-c779-49ec-8304-34ae86acec1f. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2060.322687] env[62875]: DEBUG oslo_concurrency.lockutils [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] Acquiring lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.322765] env[62875]: DEBUG oslo_concurrency.lockutils [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] Acquired lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.322924] env[62875]: DEBUG nova.network.neutron [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Refreshing network info cache for port 3f89ecea-c779-49ec-8304-34ae86acec1f {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2060.369595] env[62875]: DEBUG nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2060.402190] env[62875]: DEBUG nova.network.neutron [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2060.656242] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192b7700-1d9d-4b85-afe8-834d53da5be3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.664175] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890e4a06-5111-49a5-aba6-1c663ed699a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.699250] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea592c3d-6e77-4cc4-8d34-86f624ed2c93 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.709835] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180062, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.710707] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Successfully updated port: cf16fb35-456a-450b-8a75-bac02d08f481 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2060.718832] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fdf555-b556-4165-82c5-99696e74026e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.736901] env[62875]: DEBUG nova.compute.provider_tree [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2060.762249] env[62875]: DEBUG oslo_vmware.api [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180063, 'name': ReconfigVM_Task, 'duration_secs': 0.840742} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.762635] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-444892', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'name': 'volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '305aebbe-f983-4826-b8c0-9854458f7d48', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'serial': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2060.785156] env[62875]: DEBUG nova.network.neutron [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Updating instance_info_cache with network_info: [{"id": "19bc9bc7-4c85-4add-a788-b2b0b2376185", "address": "fa:16:3e:3c:06:a2", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19bc9bc7-4c", "ovs_interfaceid": "19bc9bc7-4c85-4add-a788-b2b0b2376185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.828537] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.889893] env[62875]: DEBUG nova.network.neutron [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
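The `network_info` blob cached above is a list of VIF dicts; the fixed IP, MAC and tap device name that later records rely on are nested several levels down. A short helper showing where those fields live, with the structure trimmed from the log record:

```python
network_info = [{
    "id": "19bc9bc7-4c85-4add-a788-b2b0b2376185",
    "address": "fa:16:3e:3c:06:a2",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.4", "type": "fixed"}],
    }]},
    "devname": "tap19bc9bc7-4c",
    "ovs_interfaceid": "19bc9bc7-4c85-4add-a788-b2b0b2376185",
}]

def summarize_vifs(nw_info):
    # One (port id, MAC, fixed IPs, tap device) tuple per VIF.
    for vif in nw_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        yield vif["id"], vif["address"], ips, vif["devname"]

print(list(summarize_vifs(network_info)))
# [('19bc9bc7-...', 'fa:16:3e:3c:06:a2', ['192.168.128.4'], 'tap19bc9bc7-4c')]
```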
[ 2061.062886] env[62875]: DEBUG nova.network.neutron [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.206282] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180062, 'name': ReconfigVM_Task, 'duration_secs': 1.153048} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.206601] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2061.207244] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce77a328-a382-4830-94f7-6340877c4e2d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.213045] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2061.213045] env[62875]: value = "task-2180064" [ 2061.213045] env[62875]: _type = "Task" [ 2061.213045] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.220972] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180064, 'name': Rename_Task} progress is 5%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.240540] env[62875]: DEBUG nova.scheduler.client.report [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2061.287103] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-816e0ecb-6476-49bb-9fea-a01067f25b51" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.287775] env[62875]: DEBUG nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Instance network_info: |[{"id": "19bc9bc7-4c85-4add-a788-b2b0b2376185", "address": "fa:16:3e:3c:06:a2", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19bc9bc7-4c", "ovs_interfaceid": "19bc9bc7-4c85-4add-a788-b2b0b2376185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2061.288152] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:06:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19bc9bc7-4c85-4add-a788-b2b0b2376185', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2061.295709] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating folder: Project (226340868e7446cca12688a32d13c630). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
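`build_virtual_machine` converts each Neutron VIF into the flat `vif_info` entry logged above; for an NSX logical switch the network_ref is an OpaqueNetwork keyed by the switch id rather than a port-group name. A sketch of that mapping under exactly the structures the two log records show (only the nsx.LogicalSwitch case):

```python
def vif_to_vmware_info(vif):
    """Map one Neutron VIF dict to the vif_info layout logged by
    build_virtual_machine."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],        # 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",  # from the image's vmxnet3 hint in this log
    }
```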
[ 2061.296358] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b21d4a5b-8a1e-4868-93f4-d80ab8e41dcd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.307566] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created folder: Project (226340868e7446cca12688a32d13c630) in parent group-v444854. [ 2061.307912] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating folder: Instances. Parent ref: group-v444896. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2061.308124] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f1aca76-f76a-48cb-94f0-07fb57c4876d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.317582] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created folder: Instances in parent group-v444896. [ 2061.320296] env[62875]: DEBUG oslo.service.loopingcall [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2061.320296] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2061.320296] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d625b208-2406-43a0-a840-4c91446b2aa8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.338938] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2061.338938] env[62875]: value = "task-2180067" [ 2061.338938] env[62875]: _type = "Task" [ 2061.338938] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.347446] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180067, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.381534] env[62875]: DEBUG nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
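The `oslo.service.loopingcall` record above is the generic "call until done" wrapper used while `create_vm` waits on the CreateVM_Task. The usual idiom is a FixedIntervalLoopingCall whose callback raises LoopingCallDone to deliver the result; a sketch of that pattern, assuming oslo.service is installed and with `check_done` as a made-up stand-in for the real completion check:

```python
from oslo_service import loopingcall

def wait_until_done(check_done, interval=0.5):
    """Poll check_done() until it returns a non-None result, then surface
    that result through LoopingCallDone (the loopingcall wait pattern)."""
    def _poll():
        result = check_done()  # hypothetical: returns None while still running
        if result is not None:
            raise loopingcall.LoopingCallDone(result)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    # start() returns an event; wait() blocks until LoopingCallDone and
    # returns the value it carried.
    return timer.start(interval=interval).wait()
```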
[ 2061.400313] env[62875]: DEBUG nova.compute.manager [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Received event network-changed-19bc9bc7-4c85-4add-a788-b2b0b2376185 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2061.400936] env[62875]: DEBUG nova.compute.manager [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Refreshing instance network info cache due to event network-changed-19bc9bc7-4c85-4add-a788-b2b0b2376185. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2061.400936] env[62875]: DEBUG oslo_concurrency.lockutils [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] Acquiring lock "refresh_cache-816e0ecb-6476-49bb-9fea-a01067f25b51" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.401186] env[62875]: DEBUG oslo_concurrency.lockutils [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] Acquired lock "refresh_cache-816e0ecb-6476-49bb-9fea-a01067f25b51" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.401337] env[62875]: DEBUG nova.network.neutron [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Refreshing network info cache for port 19bc9bc7-4c85-4add-a788-b2b0b2376185 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2061.415189] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2061.415462] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2061.415622] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:356}} [ 2061.415807] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2061.415991] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2061.416108] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2061.416338] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2061.416526] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2061.416632] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2061.416797] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2061.416969] env[62875]: DEBUG nova.virt.hardware [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2061.418126] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40b4821-a95b-40dd-a847-a8db7d85f475 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.427590] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b394ac9d-d653-4ff8-aa82-abdb396ddaeb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.566032] env[62875]: DEBUG oslo_concurrency.lockutils [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] Releasing lock 
"refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.566371] env[62875]: DEBUG nova.compute.manager [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Received event network-changed-45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2061.566562] env[62875]: DEBUG nova.compute.manager [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Refreshing instance network info cache due to event network-changed-45fd138d-48ba-4dbc-b40b-e424777fed62. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2061.566823] env[62875]: DEBUG oslo_concurrency.lockutils [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] Acquiring lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.567038] env[62875]: DEBUG oslo_concurrency.lockutils [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] Acquired lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.567226] env[62875]: DEBUG nova.network.neutron [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Refreshing network info cache for port 45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2061.678928] env[62875]: DEBUG nova.network.neutron [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Successfully updated port: c26e9371-7026-4263-9b30-4e9735c336ac {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2061.723261] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180064, 'name': Rename_Task, 'duration_secs': 0.133496} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.723620] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2061.723863] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd82d847-a9f5-4e81-a45e-487015b2d3b9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.730855] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2061.730855] env[62875]: value = "task-2180068" [ 2061.730855] env[62875]: _type = "Task" [ 2061.730855] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.740625] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180068, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.746545] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.746545] env[62875]: DEBUG nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2061.749025] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 23.904s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.749199] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.749381] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2061.749766] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.647s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.751270] env[62875]: INFO nova.compute.claims [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2061.754262] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460a69db-eb69-4f52-8f42-6f0384818747 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.763241] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feffb4d2-98d1-460b-b8a0-2a660d11c140 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.777589] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6f8bd6-be42-4d1c-a657-d1bdcba036ff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.784885] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9de18e-efac-4b57-9b40-f81c72ed4c5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.818333] env[62875]: DEBUG nova.objects.instance [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'flavor' on Instance uuid 305aebbe-f983-4826-b8c0-9854458f7d48 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2061.820426] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180974MB free_disk=175GB 
free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2061.820589] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.849132] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180067, 'name': CreateVM_Task, 'duration_secs': 0.332761} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.849993] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2061.850276] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.850276] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.854019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2061.854019] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1be555e-211d-433c-83ff-f4d7af59731f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.855858] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2061.855858] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52638723-ce24-431e-a3fb-fce789134b36" [ 2061.855858] env[62875]: _type = "Task" [ 2061.855858] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.867289] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52638723-ce24-431e-a3fb-fce789134b36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.130223] env[62875]: DEBUG nova.network.neutron [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Updated VIF entry in instance network info cache for port 19bc9bc7-4c85-4add-a788-b2b0b2376185. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2062.130586] env[62875]: DEBUG nova.network.neutron [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Updating instance_info_cache with network_info: [{"id": "19bc9bc7-4c85-4add-a788-b2b0b2376185", "address": "fa:16:3e:3c:06:a2", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19bc9bc7-4c", "ovs_interfaceid": "19bc9bc7-4c85-4add-a788-b2b0b2376185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2062.182136] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "refresh_cache-2cf54268-5499-49c9-8029-68b3866581d0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.182136] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquired lock "refresh_cache-2cf54268-5499-49c9-8029-68b3866581d0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.182136] env[62875]: DEBUG nova.network.neutron [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2062.241971] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180068, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.250392] env[62875]: DEBUG nova.compute.utils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2062.251682] env[62875]: DEBUG nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2062.251854] env[62875]: DEBUG nova.network.neutron [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2062.294181] env[62875]: DEBUG nova.policy [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e9432e680f5437dae304105a02c2b36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e48cead7352349dbab0d47c19e048eae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2062.323689] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bdcacf62-9173-4d3c-b752-f03f117a3a98 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.299s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.366615] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52638723-ce24-431e-a3fb-fce789134b36, 'name': SearchDatastore_Task, 'duration_secs': 0.010695} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.366809] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.367086] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2062.369016] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.369016] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.369016] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2062.369016] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-684d74f1-90c5-43f7-8bf8-eebdb4827779 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.376537] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2062.376958] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2062.377448] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b2b9a01-2cc4-40a6-b267-dde9d0715164 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.387053] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2062.387053] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523015b4-8796-9093-9a5f-3f9cddac3385" [ 2062.387053] env[62875]: _type = "Task" [ 2062.387053] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.399329] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523015b4-8796-9093-9a5f-3f9cddac3385, 'name': SearchDatastore_Task, 'duration_secs': 0.008815} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.400214] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04127365-147b-4995-87d8-383708e33f44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.405601] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2062.405601] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac4228-c834-2c29-1df8-60c4c74128f1" [ 2062.405601] env[62875]: _type = "Task" [ 2062.405601] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.417120] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac4228-c834-2c29-1df8-60c4c74128f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.483592] env[62875]: DEBUG nova.network.neutron [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updated VIF entry in instance network info cache for port 45fd138d-48ba-4dbc-b40b-e424777fed62. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2062.484065] env[62875]: DEBUG nova.network.neutron [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updating instance_info_cache with network_info: [{"id": "45fd138d-48ba-4dbc-b40b-e424777fed62", "address": "fa:16:3e:bb:5c:21", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45fd138d-48", "ovs_interfaceid": "45fd138d-48ba-4dbc-b40b-e424777fed62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2062.513028] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "305aebbe-f983-4826-b8c0-9854458f7d48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2062.513281] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.513432] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "305aebbe-f983-4826-b8c0-9854458f7d48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2062.513617] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.513784] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc 
tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.516668] env[62875]: INFO nova.compute.manager [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Terminating instance [ 2062.595708] env[62875]: DEBUG nova.network.neutron [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Successfully created port: c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2062.635859] env[62875]: DEBUG oslo_concurrency.lockutils [req-bcdd9de9-ac4b-4212-ab7b-41c9fb8366e0 req-c8c2b2bf-36a7-4748-be09-cdc998474dec service nova] Releasing lock "refresh_cache-816e0ecb-6476-49bb-9fea-a01067f25b51" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.662847] env[62875]: DEBUG nova.compute.manager [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-vif-plugged-cf16fb35-456a-450b-8a75-bac02d08f481 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2062.662847] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Acquiring lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2062.663063] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.663259] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.663449] env[62875]: DEBUG nova.compute.manager [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] No waiting events found dispatching network-vif-plugged-cf16fb35-456a-450b-8a75-bac02d08f481 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2062.663632] env[62875]: WARNING nova.compute.manager [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received unexpected event 
network-vif-plugged-cf16fb35-456a-450b-8a75-bac02d08f481 for instance with vm_state building and task_state spawning. [ 2062.663816] env[62875]: DEBUG nova.compute.manager [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-changed-cf16fb35-456a-450b-8a75-bac02d08f481 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2062.664009] env[62875]: DEBUG nova.compute.manager [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Refreshing instance network info cache due to event network-changed-cf16fb35-456a-450b-8a75-bac02d08f481. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2062.664328] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Acquiring lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.664498] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Acquired lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.664669] env[62875]: DEBUG nova.network.neutron [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Refreshing network info cache for port cf16fb35-456a-450b-8a75-bac02d08f481 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2062.739183] env[62875]: DEBUG nova.network.neutron [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2062.753714] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180068, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.755357] env[62875]: DEBUG nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2062.921923] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac4228-c834-2c29-1df8-60c4c74128f1, 'name': SearchDatastore_Task, 'duration_secs': 0.008637} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.922367] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.922643] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 816e0ecb-6476-49bb-9fea-a01067f25b51/816e0ecb-6476-49bb-9fea-a01067f25b51.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2062.922894] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b5ed693-fdfe-4c66-a58c-ffe848079c8b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.931709] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2062.931709] env[62875]: value = "task-2180069" [ 2062.931709] env[62875]: _type = "Task" [ 2062.931709] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.942021] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.989317] env[62875]: DEBUG oslo_concurrency.lockutils [req-c88a025e-f936-44f7-9bb8-32489324fefb req-8ba9376e-5077-48b0-9d6d-520f6e528cff service nova] Releasing lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.020860] env[62875]: DEBUG nova.compute.manager [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2063.021190] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2063.021450] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9767ecad-e0d9-4c9e-b558-62470b037632 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.028460] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2063.028460] env[62875]: value = "task-2180070" [ 2063.028460] env[62875]: _type = "Task" [ 2063.028460] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.039567] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.057177] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144bb317-000f-4bf8-ac53-78e7ad380390 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.064700] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6014d3ec-b876-4e42-a2bf-2123f9507b39 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.100520] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3959e6ea-ab81-49d1-b5e3-fc12b502c3d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.108673] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b7951a-e75a-452f-af17-7e3c7ae82d26 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.123243] env[62875]: DEBUG nova.compute.provider_tree [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2063.210817] env[62875]: DEBUG nova.network.neutron [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2063.243818] env[62875]: DEBUG oslo_vmware.api [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180068, 'name': PowerOnVM_Task, 'duration_secs': 1.088111} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.244137] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2063.244378] env[62875]: INFO nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Took 9.10 seconds to spawn the instance on the hypervisor. [ 2063.244644] env[62875]: DEBUG nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2063.245576] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2f1101-f415-484e-bb31-a5f3a90b13d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.321531] env[62875]: DEBUG nova.network.neutron [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.354148] env[62875]: DEBUG nova.network.neutron [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Updating instance_info_cache with network_info: [{"id": "c26e9371-7026-4263-9b30-4e9735c336ac", "address": "fa:16:3e:19:fa:d2", "network": {"id": "a9240618-4dcc-448d-8e43-b1e042994447", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1230530864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5eb2a81ac944f0ca4b81df00d4859e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b250e561-3be3-4bae-be1a-162251b1ee31", "external-id": "nsx-vlan-transportzone-464", "segmentation_id": 464, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26e9371-70", "ovs_interfaceid": "c26e9371-7026-4263-9b30-4e9735c336ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.442263] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475802} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.442478] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 816e0ecb-6476-49bb-9fea-a01067f25b51/816e0ecb-6476-49bb-9fea-a01067f25b51.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2063.442683] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2063.442926] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ace51d0e-57f0-47b9-b9db-f748fd840852 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.448461] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2063.448461] env[62875]: value = "task-2180071" [ 2063.448461] env[62875]: _type = "Task" [ 2063.448461] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.459178] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.540763] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180070, 'name': PowerOffVM_Task, 'duration_secs': 0.208156} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.541821] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2063.541821] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Volume detach. 
Driver type: vmdk {{(pid=62875) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2063.541821] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-444892', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'name': 'volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '305aebbe-f983-4826-b8c0-9854458f7d48', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'serial': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2063.542199] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081f0e5e-c0a1-4e3a-a54f-9b6aeaa487d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.563538] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a09824-52a5-4d82-9493-163d1b7f7140 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.570569] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06ab26f-5a23-46d5-9000-8a43f694d31a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.590941] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3dea6b-9edc-4a8f-bbef-eb25b3fc5d17 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.605399] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] The volume has not been displaced from its original location: [datastore2] volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6/volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6.vmdk. No consolidation needed. 
{{(pid=62875) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2063.610879] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Reconfiguring VM instance instance-0000002c to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2063.611201] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcfc9013-7f63-41ed-9c3a-b54a7b39a6a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.626786] env[62875]: DEBUG nova.scheduler.client.report [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2063.631672] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2063.631672] env[62875]: value = "task-2180072" [ 2063.631672] env[62875]: _type = "Task" [ 2063.631672] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.639929] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180072, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.683314] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Successfully updated port: 910a7aba-2677-4608-af5a-5efee055c3aa {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2063.731953] env[62875]: DEBUG nova.compute.manager [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Received event network-vif-plugged-c26e9371-7026-4263-9b30-4e9735c336ac {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2063.733102] env[62875]: DEBUG oslo_concurrency.lockutils [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] Acquiring lock "2cf54268-5499-49c9-8029-68b3866581d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.733102] env[62875]: DEBUG oslo_concurrency.lockutils [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] Lock "2cf54268-5499-49c9-8029-68b3866581d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.733102] env[62875]: DEBUG oslo_concurrency.lockutils [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] Lock "2cf54268-5499-49c9-8029-68b3866581d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.733102] env[62875]: DEBUG nova.compute.manager [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] No waiting events found dispatching network-vif-plugged-c26e9371-7026-4263-9b30-4e9735c336ac {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2063.733102] env[62875]: WARNING nova.compute.manager [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Received unexpected event network-vif-plugged-c26e9371-7026-4263-9b30-4e9735c336ac for instance with vm_state building and task_state spawning. [ 2063.733601] env[62875]: DEBUG nova.compute.manager [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Received event network-changed-c26e9371-7026-4263-9b30-4e9735c336ac {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2063.733601] env[62875]: DEBUG nova.compute.manager [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Refreshing instance network info cache due to event network-changed-c26e9371-7026-4263-9b30-4e9735c336ac. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2063.733661] env[62875]: DEBUG oslo_concurrency.lockutils [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] Acquiring lock "refresh_cache-2cf54268-5499-49c9-8029-68b3866581d0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2063.763957] env[62875]: INFO nova.compute.manager [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Took 37.39 seconds to build instance. [ 2063.766910] env[62875]: DEBUG nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2063.794303] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2063.794547] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2063.794711] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2063.794893] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2063.795046] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2063.795202] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 
tempest-ServerActionsTestOtherB-1222443432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2063.798012] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2063.798012] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2063.798012] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2063.798012] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2063.798012] env[62875]: DEBUG nova.virt.hardware [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2063.798279] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfd4232-a658-4e1c-9b6e-b274457cea24 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.805924] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465bea7d-27d2-439d-ba71-4e3e39652892 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.823434] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Releasing lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.823575] env[62875]: DEBUG nova.compute.manager [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Received event network-changed-45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2063.823746] env[62875]: DEBUG nova.compute.manager [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Refreshing instance network info cache due to event network-changed-45fd138d-48ba-4dbc-b40b-e424777fed62. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2063.823959] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Acquiring lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2063.824125] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Acquired lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2063.824292] env[62875]: DEBUG nova.network.neutron [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Refreshing network info cache for port 45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2063.856929] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Releasing lock "refresh_cache-2cf54268-5499-49c9-8029-68b3866581d0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.857298] env[62875]: DEBUG nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Instance network_info: |[{"id": "c26e9371-7026-4263-9b30-4e9735c336ac", "address": "fa:16:3e:19:fa:d2", "network": {"id": "a9240618-4dcc-448d-8e43-b1e042994447", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1230530864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5eb2a81ac944f0ca4b81df00d4859e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b250e561-3be3-4bae-be1a-162251b1ee31", "external-id": "nsx-vlan-transportzone-464", "segmentation_id": 464, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26e9371-70", "ovs_interfaceid": "c26e9371-7026-4263-9b30-4e9735c336ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2063.857597] env[62875]: DEBUG oslo_concurrency.lockutils [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] Acquired lock "refresh_cache-2cf54268-5499-49c9-8029-68b3866581d0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2063.857769] env[62875]: DEBUG nova.network.neutron [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 
2cf54268-5499-49c9-8029-68b3866581d0] Refreshing network info cache for port c26e9371-7026-4263-9b30-4e9735c336ac {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2063.862038] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:fa:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b250e561-3be3-4bae-be1a-162251b1ee31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c26e9371-7026-4263-9b30-4e9735c336ac', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2063.866277] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Creating folder: Project (f5eb2a81ac944f0ca4b81df00d4859e7). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2063.871270] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72542738-90fc-4d0c-9898-8516c2027d07 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.886585] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Created folder: Project (f5eb2a81ac944f0ca4b81df00d4859e7) in parent group-v444854. [ 2063.886788] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Creating folder: Instances. Parent ref: group-v444899. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2063.887034] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d482218-8165-4b3a-a4ad-d9052c13daa4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.899045] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Created folder: Instances in parent group-v444899. [ 2063.900023] env[62875]: DEBUG oslo.service.loopingcall [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2063.900023] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2063.900023] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cd593ff-5707-45e6-95f6-983719bb97ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.922436] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2063.922436] env[62875]: value = "task-2180075" [ 2063.922436] env[62875]: _type = "Task" [ 2063.922436] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.930262] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180075, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.961007] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055276} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.961281] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2063.962480] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2549e02c-24fc-48d3-a04d-e5f2d6e511f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.985607] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 816e0ecb-6476-49bb-9fea-a01067f25b51/816e0ecb-6476-49bb-9fea-a01067f25b51.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2063.985946] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52a1060d-e571-4bae-bb17-ef004f3a5dea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.007854] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2064.007854] env[62875]: value = "task-2180076" [ 2064.007854] env[62875]: _type = "Task" [ 2064.007854] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.019997] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180076, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.127850] env[62875]: DEBUG nova.network.neutron [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Successfully updated port: c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2064.135472] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.135472] env[62875]: DEBUG nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2064.140110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.641s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.140110] env[62875]: DEBUG nova.objects.instance [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lazy-loading 'resources' on Instance uuid acc78084-21e8-456c-a573-fc5e931147c6 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2064.158350] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180072, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.185945] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.186190] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.186395] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2064.257973] env[62875]: DEBUG nova.network.neutron [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Updated VIF entry in instance network info cache for port c26e9371-7026-4263-9b30-4e9735c336ac. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2064.258697] env[62875]: DEBUG nova.network.neutron [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Updating instance_info_cache with network_info: [{"id": "c26e9371-7026-4263-9b30-4e9735c336ac", "address": "fa:16:3e:19:fa:d2", "network": {"id": "a9240618-4dcc-448d-8e43-b1e042994447", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1230530864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5eb2a81ac944f0ca4b81df00d4859e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b250e561-3be3-4bae-be1a-162251b1ee31", "external-id": "nsx-vlan-transportzone-464", "segmentation_id": 464, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26e9371-70", "ovs_interfaceid": "c26e9371-7026-4263-9b30-4e9735c336ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.266601] env[62875]: DEBUG oslo_concurrency.lockutils [None req-734ec345-8c32-4631-8129-0e7dc091ec3e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.681s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.433751] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180075, 'name': CreateVM_Task, 'duration_secs': 0.4196} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.433984] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2064.434653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.434825] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.435316] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2064.435580] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07211592-7d54-4576-a0bf-7789d718acee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.440772] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2064.440772] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524d70ba-a391-ea64-ede1-f82380735508" [ 2064.440772] env[62875]: _type = "Task" [ 2064.440772] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.449705] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524d70ba-a391-ea64-ede1-f82380735508, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.518604] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180076, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.631010] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.631206] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquired lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.631381] env[62875]: DEBUG nova.network.neutron [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2064.643345] env[62875]: DEBUG nova.compute.utils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2064.647393] env[62875]: DEBUG nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2064.647393] env[62875]: DEBUG nova.network.neutron [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2064.661640] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180072, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.724596] env[62875]: DEBUG nova.policy [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86e44a4e203b49e09a8f9d2bb45b8079', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95d0f81815ea467cbc1c6160e27409fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2064.734245] env[62875]: DEBUG nova.network.neutron [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updated VIF entry in instance network info cache for port 45fd138d-48ba-4dbc-b40b-e424777fed62. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2064.734652] env[62875]: DEBUG nova.network.neutron [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updating instance_info_cache with network_info: [{"id": "45fd138d-48ba-4dbc-b40b-e424777fed62", "address": "fa:16:3e:bb:5c:21", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45fd138d-48", "ovs_interfaceid": "45fd138d-48ba-4dbc-b40b-e424777fed62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.754720] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2064.764856] env[62875]: DEBUG oslo_concurrency.lockutils [req-45901f18-8639-48c5-9e81-926bec88a469 req-d234f161-1588-4da5-8e3a-19e6d979137c service nova] Releasing lock "refresh_cache-2cf54268-5499-49c9-8029-68b3866581d0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.769537] env[62875]: DEBUG nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2064.878962] env[62875]: DEBUG nova.compute.manager [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-vif-plugged-910a7aba-2677-4608-af5a-5efee055c3aa {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2064.879213] env[62875]: DEBUG oslo_concurrency.lockutils [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] Acquiring lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.879423] env[62875]: DEBUG oslo_concurrency.lockutils [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.879587] env[62875]: DEBUG oslo_concurrency.lockutils [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.879779] env[62875]: DEBUG nova.compute.manager [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] No waiting events found dispatching network-vif-plugged-910a7aba-2677-4608-af5a-5efee055c3aa {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2064.880059] env[62875]: WARNING nova.compute.manager [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received unexpected event network-vif-plugged-910a7aba-2677-4608-af5a-5efee055c3aa for instance with vm_state building and task_state spawning. 
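[editor's note] The three entries just above (pop_instance_event lock acquired/released, "No waiting events found dispatching network-vif-plugged-…", then the WARNING "Received unexpected event … for instance with vm_state building") trace Nova's external-event waiter pattern: the spawn path registers a waiter for an event such as network-vif-plugged-<port>, and the handler for events pushed in from Neutron pops and signals the matching waiter; if the event lands before any waiter exists, only the warning can be logged. A minimal illustrative sketch of that shape follows — it is not Nova's actual implementation, and all names in it are hypothetical.

    # Sketch of the event-waiter pattern visible in the log above.
    # Not Nova's code; InstanceEventWaiter and its methods are invented.
    import threading

    class InstanceEventWaiter:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "-events" lock
            self._waiters = {}              # event name -> threading.Event

        def prepare(self, event_name):
            """Called by the spawn path before it blocks on the event."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[event_name] = waiter
            return waiter

        def pop_and_signal(self, event_name):
            """Called by the external-event handler; False if nobody is waiting."""
            with self._lock:
                waiter = self._waiters.pop(event_name, None)
            if waiter is None:
                # Corresponds to the "Received unexpected event" WARNING above.
                print(f"WARNING: received unexpected event {event_name}")
                return False
            waiter.set()
            return True

    if __name__ == "__main__":
        events = InstanceEventWaiter()
        # Event arrives before a waiter is registered -> warning branch.
        events.pop_and_signal("network-vif-plugged-910a7aba")
        # Normal flow: register first, deliver later, wait succeeds.
        w = events.prepare("network-vif-plugged-c4a5613b")
        threading.Timer(0.1, events.pop_and_signal,
                        args=("network-vif-plugged-c4a5613b",)).start()
        assert w.wait(timeout=5)

[end editor's note]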
[ 2064.880369] env[62875]: DEBUG nova.compute.manager [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-changed-910a7aba-2677-4608-af5a-5efee055c3aa {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2064.880558] env[62875]: DEBUG nova.compute.manager [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Refreshing instance network info cache due to event network-changed-910a7aba-2677-4608-af5a-5efee055c3aa. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2064.880731] env[62875]: DEBUG oslo_concurrency.lockutils [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] Acquiring lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.951757] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524d70ba-a391-ea64-ede1-f82380735508, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.958152] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.959307] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2064.959307] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.959307] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.959307] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2064.959479] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ff45c30-21f3-4b8a-86d6-952830db8577 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.968253] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2064.968441] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2064.969211] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89039dd5-714b-4fba-8657-5f4d0d0b7dd9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.980239] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2064.980239] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52854c95-6bf4-2476-6e6b-29044518eaf1" [ 2064.980239] env[62875]: _type = "Task" [ 2064.980239] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.990471] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52854c95-6bf4-2476-6e6b-29044518eaf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.992288] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea60a0b-fd34-4426-8d6e-294b29af6bfc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.002465] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c23f3-95a7-47fa-8184-4a0b44e9f4d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.040965] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5059d85-9d48-42a2-8c0e-0380b764014d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.047246] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180076, 'name': ReconfigVM_Task, 'duration_secs': 1.018844} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.047853] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 816e0ecb-6476-49bb-9fea-a01067f25b51/816e0ecb-6476-49bb-9fea-a01067f25b51.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2065.048500] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49c01193-c9a1-46c3-bb5d-88c229854700 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.052449] env[62875]: DEBUG nova.network.neutron [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Successfully created port: 8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2065.055550] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce03cf2-5f49-45a4-991e-4db6ea7f3a67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.061054] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2065.061054] env[62875]: value = "task-2180077" [ 2065.061054] env[62875]: _type = "Task" [ 2065.061054] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.072488] env[62875]: DEBUG nova.compute.provider_tree [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2065.081503] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180077, 'name': Rename_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.147876] env[62875]: DEBUG nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2065.150547] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180072, 'name': ReconfigVM_Task, 'duration_secs': 1.227046} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.151508] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Reconfigured VM instance instance-0000002c to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2065.157278] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93c6ef37-c8b4-4e1a-ba1b-23984934f141 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.173432] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2065.173432] env[62875]: value = "task-2180078" [ 2065.173432] env[62875]: _type = "Task" [ 2065.173432] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.185020] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.227891] env[62875]: DEBUG nova.network.neutron [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2065.240206] env[62875]: DEBUG oslo_concurrency.lockutils [req-789cbed8-430f-40c9-ba2a-bf349723a2ec req-e8798fb5-765a-4a2a-b353-05de9753857b service nova] Releasing lock "refresh_cache-d0c4095f-2d78-4055-b568-7e70e7c4c182" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2065.289791] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.324172] env[62875]: INFO nova.compute.manager [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Rebuilding instance [ 2065.368983] env[62875]: DEBUG nova.compute.manager [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2065.369980] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6774c577-6a0b-4798-ab84-20e68d4e6f84 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.492273] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52854c95-6bf4-2476-6e6b-29044518eaf1, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.493315] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cbd610d-6380-4856-bc4a-4472122c87ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.499045] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2065.499045] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ec3894-ba1e-253a-04b2-e4795551a3ee" [ 2065.499045] env[62875]: _type = "Task" [ 2065.499045] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.512637] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ec3894-ba1e-253a-04b2-e4795551a3ee, 'name': SearchDatastore_Task, 'duration_secs': 0.010032} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.512908] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2065.513193] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2cf54268-5499-49c9-8029-68b3866581d0/2cf54268-5499-49c9-8029-68b3866581d0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2065.513463] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bb4a75c-3d3b-480e-9bba-4483042ed342 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.519414] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2065.519414] env[62875]: value = "task-2180079" [ 2065.519414] env[62875]: _type = "Task" [ 2065.519414] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.527325] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.570978] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180077, 'name': Rename_Task, 'duration_secs': 0.12605} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.571339] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2065.574339] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a94d500e-8130-43ee-a5cc-d8e497062f2f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.584475] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2065.584475] env[62875]: value = "task-2180080" [ 2065.584475] env[62875]: _type = "Task" [ 2065.584475] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.592547] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180080, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.618911] env[62875]: DEBUG nova.scheduler.client.report [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2065.621447] env[62875]: DEBUG nova.compute.provider_tree [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 67 to 68 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2065.621447] env[62875]: DEBUG nova.compute.provider_tree [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2065.624394] env[62875]: DEBUG nova.network.neutron [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updating instance_info_cache with network_info: [{"id": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "address": "fa:16:3e:8b:f8:16", "network": {"id": "86c8f999-67e2-4985-82bb-3f9c44f0fbb8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1235728423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48cead7352349dbab0d47c19e048eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", 
"external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a5613b-53", "ovs_interfaceid": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.686806] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180078, 'name': ReconfigVM_Task, 'duration_secs': 0.136529} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.686806] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-444892', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'name': 'volume-e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '305aebbe-f983-4826-b8c0-9854458f7d48', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6', 'serial': 'e2465556-b4a9-4f12-9fd7-73c7f79b83f6'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2065.686806] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2065.687184] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a87622-45ba-45eb-b9b5-1b7d8820cba1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.694725] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2065.694984] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbc483bf-5bf5-49ee-bc00-bc17b5fce8f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.738289] env[62875]: DEBUG nova.network.neutron [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [{"id": "3f89ecea-c779-49ec-8304-34ae86acec1f", "address": "fa:16:3e:6f:74:65", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.211", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f89ecea-c7", "ovs_interfaceid": "3f89ecea-c779-49ec-8304-34ae86acec1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cf16fb35-456a-450b-8a75-bac02d08f481", "address": "fa:16:3e:44:9c:5e", "network": {"id": "be7b865a-da9e-4284-9313-6199ab2240a3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-308012527", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf16fb35-45", "ovs_interfaceid": "cf16fb35-456a-450b-8a75-bac02d08f481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "910a7aba-2677-4608-af5a-5efee055c3aa", "address": "fa:16:3e:ea:81:ef", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap910a7aba-26", "ovs_interfaceid": "910a7aba-2677-4608-af5a-5efee055c3aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.771052] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 
305aebbe-f983-4826-b8c0-9854458f7d48] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2065.771361] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2065.771585] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleting the datastore file [datastore1] 305aebbe-f983-4826-b8c0-9854458f7d48 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2065.771897] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a88c84be-ec7d-4449-88e7-33589a2e0094 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.781205] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2065.781205] env[62875]: value = "task-2180082" [ 2065.781205] env[62875]: _type = "Task" [ 2065.781205] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.790882] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180082, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.877818] env[62875]: DEBUG nova.compute.manager [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Received event network-vif-plugged-c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2065.877818] env[62875]: DEBUG oslo_concurrency.lockutils [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] Acquiring lock "9e0aaea6-96cf-494d-9f70-a709a47f9772-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.877818] env[62875]: DEBUG oslo_concurrency.lockutils [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.877818] env[62875]: DEBUG oslo_concurrency.lockutils [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.878056] env[62875]: DEBUG nova.compute.manager [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] No waiting events found dispatching network-vif-plugged-c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2065.878221] env[62875]: WARNING nova.compute.manager [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Received unexpected event network-vif-plugged-c4a5613b-5345-49d7-b791-29a0dbe58ed2 for instance with vm_state building and task_state spawning. [ 2065.878493] env[62875]: DEBUG nova.compute.manager [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Received event network-changed-c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2065.878629] env[62875]: DEBUG nova.compute.manager [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Refreshing instance network info cache due to event network-changed-c4a5613b-5345-49d7-b791-29a0dbe58ed2. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2065.878813] env[62875]: DEBUG oslo_concurrency.lockutils [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] Acquiring lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.030941] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180079, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472623} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.031400] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2cf54268-5499-49c9-8029-68b3866581d0/2cf54268-5499-49c9-8029-68b3866581d0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2066.031781] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2066.032170] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1cfea71e-5f67-4278-879a-c9194fc2e72b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.040691] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2066.040691] env[62875]: value = "task-2180083" [ 2066.040691] env[62875]: _type = "Task" [ 2066.040691] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.049448] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180083, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.096398] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180080, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.130372] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.130909] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Releasing lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.131392] env[62875]: DEBUG nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Instance network_info: |[{"id": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "address": "fa:16:3e:8b:f8:16", "network": {"id": "86c8f999-67e2-4985-82bb-3f9c44f0fbb8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1235728423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48cead7352349dbab0d47c19e048eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a5613b-53", "ovs_interfaceid": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2066.132110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.749s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.132466] env[62875]: DEBUG nova.objects.instance [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lazy-loading 'resources' on Instance uuid a64253fe-4ba9-4686-810b-a26a4c29631b {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2066.134068] env[62875]: DEBUG oslo_concurrency.lockutils [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] Acquired lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.134392] env[62875]: DEBUG nova.network.neutron [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Refreshing network info cache for port c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2066.135724] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:f8:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1880df72-582c-44cb-992d-88dc6a514914', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4a5613b-5345-49d7-b791-29a0dbe58ed2', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2066.151370] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Creating folder: Project (e48cead7352349dbab0d47c19e048eae). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2066.153402] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eecc9407-84ae-430a-a73b-42bc5a99472c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.160277] env[62875]: INFO nova.scheduler.client.report [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Deleted allocations for instance acc78084-21e8-456c-a573-fc5e931147c6 [ 2066.165636] env[62875]: DEBUG nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2066.169906] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Created folder: Project (e48cead7352349dbab0d47c19e048eae) in parent group-v444854. [ 2066.170244] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Creating folder: Instances. Parent ref: group-v444902. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2066.172545] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37fb4cb1-f65c-421b-9cfa-b41b46c0555d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.182021] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Created folder: Instances in parent group-v444902. [ 2066.182021] env[62875]: DEBUG oslo.service.loopingcall [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.182021] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2066.182021] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9cb21ce-7705-4fbd-9459-316789dddf34 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.206755] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2066.206755] env[62875]: value = "task-2180086" [ 2066.206755] env[62875]: _type = "Task" [ 2066.206755] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.212480] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2066.212921] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2066.213218] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2066.213515] env[62875]: DEBUG nova.virt.hardware [None 
req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2066.213776] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2066.214073] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2066.214475] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2066.214796] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2066.215102] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2066.215385] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2066.215727] env[62875]: DEBUG nova.virt.hardware [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2066.216936] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e84535-ab6f-41f6-9b0c-6c3b7e59e164 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.226464] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180086, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.229977] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739de8dd-8b03-4e1c-a398-7f308b74ee3c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.245369] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Releasing lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.245998] env[62875]: DEBUG nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Instance network_info: |[{"id": "3f89ecea-c779-49ec-8304-34ae86acec1f", "address": "fa:16:3e:6f:74:65", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.211", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f89ecea-c7", "ovs_interfaceid": "3f89ecea-c779-49ec-8304-34ae86acec1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cf16fb35-456a-450b-8a75-bac02d08f481", "address": "fa:16:3e:44:9c:5e", "network": {"id": "be7b865a-da9e-4284-9313-6199ab2240a3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-308012527", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf16fb35-45", "ovs_interfaceid": "cf16fb35-456a-450b-8a75-bac02d08f481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "910a7aba-2677-4608-af5a-5efee055c3aa", "address": "fa:16:3e:ea:81:ef", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap910a7aba-26", "ovs_interfaceid": "910a7aba-2677-4608-af5a-5efee055c3aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2066.246697] env[62875]: DEBUG oslo_concurrency.lockutils [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] Acquired lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.247056] env[62875]: DEBUG nova.network.neutron [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Refreshing network info cache for port 910a7aba-2677-4608-af5a-5efee055c3aa {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2066.251437] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:74:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f89ecea-c779-49ec-8304-34ae86acec1f', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:9c:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf16fb35-456a-450b-8a75-bac02d08f481', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:81:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '910a7aba-2677-4608-af5a-5efee055c3aa', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2066.262589] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Creating folder: Project (63be470870764b6ab6e803cc2a345f24). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2066.263949] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16c9b956-1760-4d4e-a0bf-ffd7f28b45ca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.275228] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Created folder: Project (63be470870764b6ab6e803cc2a345f24) in parent group-v444854. [ 2066.275489] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Creating folder: Instances. Parent ref: group-v444905. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2066.275763] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-519bc627-422c-4679-b606-f6fbb0d1097f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.289822] env[62875]: DEBUG oslo_vmware.api [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277977} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.291112] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2066.291314] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2066.291496] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2066.291675] env[62875]: INFO nova.compute.manager [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Took 3.27 seconds to destroy the instance on the hypervisor. [ 2066.291913] env[62875]: DEBUG oslo.service.loopingcall [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.292131] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Created folder: Instances in parent group-v444905. [ 2066.292324] env[62875]: DEBUG oslo.service.loopingcall [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.292504] env[62875]: DEBUG nova.compute.manager [-] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2066.292603] env[62875]: DEBUG nova.network.neutron [-] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2066.294561] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2066.294561] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-237fc9ac-142b-42a5-b423-8b2b7ec13d5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.328764] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2066.328764] env[62875]: value = "task-2180089" [ 2066.328764] env[62875]: _type = "Task" [ 2066.328764] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.338886] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180089, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.383506] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2066.383924] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d504b59-a7d9-4af3-985c-41306a6112ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.393421] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2066.393421] env[62875]: value = "task-2180090" [ 2066.393421] env[62875]: _type = "Task" [ 2066.393421] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.405774] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180090, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.551984] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180083, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087691} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.552980] env[62875]: DEBUG nova.network.neutron [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Successfully updated port: 8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2066.554123] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2066.555141] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87dbadb-09e9-49bb-bbd3-2174346f7969 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.579883] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 2cf54268-5499-49c9-8029-68b3866581d0/2cf54268-5499-49c9-8029-68b3866581d0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2066.580842] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db42abd4-fd86-4a87-a5fd-8533a6af7b11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.605216] env[62875]: DEBUG oslo_vmware.api [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180080, 'name': PowerOnVM_Task, 'duration_secs': 0.682359} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.606512] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2066.606720] env[62875]: INFO nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Took 7.61 seconds to spawn the instance on the hypervisor. [ 2066.607094] env[62875]: DEBUG nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2066.607319] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2066.607319] env[62875]: value = "task-2180091" [ 2066.607319] env[62875]: _type = "Task" [ 2066.607319] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.608017] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020bbc15-9aad-499f-8aaa-ff4d7276b5cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.623019] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180091, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.673642] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8efe0b2b-c320-4cac-aeb4-0de2a6ce5330 tempest-VolumesAdminNegativeTest-897597342 tempest-VolumesAdminNegativeTest-897597342-project-member] Lock "acc78084-21e8-456c-a573-fc5e931147c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.650s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.718032] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180086, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.846123] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180089, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.908237] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180090, 'name': PowerOffVM_Task, 'duration_secs': 0.187756} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.908864] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2066.909243] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2066.913474] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a98ea4b-fafc-4369-8599-1c5f1eab18d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.927030] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2066.927406] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed418f40-b583-4c76-a0c3-81540abae0ec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.963265] env[62875]: DEBUG nova.network.neutron [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updated VIF entry in instance network info cache for port c4a5613b-5345-49d7-b791-29a0dbe58ed2. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2066.963265] env[62875]: DEBUG nova.network.neutron [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updating instance_info_cache with network_info: [{"id": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "address": "fa:16:3e:8b:f8:16", "network": {"id": "86c8f999-67e2-4985-82bb-3f9c44f0fbb8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1235728423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48cead7352349dbab0d47c19e048eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a5613b-53", "ovs_interfaceid": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.971979] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae61223-b2d3-4cc8-b3bb-8a5122f13a9a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.979729] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2d3464-47ac-489a-9e43-a01c639f8be0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.015076] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8993ada0-a096-4ef7-8de1-c3441756f2f1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.022565] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9e8b01-4683-4f24-916e-23d039bace1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.038592] env[62875]: DEBUG nova.compute.provider_tree [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.055991] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.056162] env[62875]: DEBUG oslo_concurrency.lockutils [None 
req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.056322] env[62875]: DEBUG nova.network.neutron [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2067.121072] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180091, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.133039] env[62875]: INFO nova.compute.manager [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Took 32.23 seconds to build instance. [ 2067.216652] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180086, 'name': CreateVM_Task, 'duration_secs': 0.517329} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.216781] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2067.217497] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.217659] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.217986] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2067.218262] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adb235db-5ce4-4eb3-9faf-702f2c266d5a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.221221] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 
8361611a-ad16-43ef-94e0-f2e7e9851682] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2067.221468] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2067.221690] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleting the datastore file [datastore2] 8361611a-ad16-43ef-94e0-f2e7e9851682 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2067.222325] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64fb2947-e19f-4dc8-95bb-e7c40ad086a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.225100] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2067.225100] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d2557c-7de0-1c8b-39f1-3b5abc0014ea" [ 2067.225100] env[62875]: _type = "Task" [ 2067.225100] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.229863] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2067.229863] env[62875]: value = "task-2180093" [ 2067.229863] env[62875]: _type = "Task" [ 2067.229863] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.236157] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d2557c-7de0-1c8b-39f1-3b5abc0014ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.238725] env[62875]: DEBUG nova.compute.manager [req-1089b91c-63d0-4849-bd05-fd3fef1dde3b req-804521d0-3b39-40a8-ab64-760161a52e1e service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Received event network-vif-deleted-09e96105-f947-4b3d-b097-d53f32948ee2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2067.238973] env[62875]: INFO nova.compute.manager [req-1089b91c-63d0-4849-bd05-fd3fef1dde3b req-804521d0-3b39-40a8-ab64-760161a52e1e service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Neutron deleted interface 09e96105-f947-4b3d-b097-d53f32948ee2; detaching it from the instance and deleting it from the info cache [ 2067.239304] env[62875]: DEBUG nova.network.neutron [req-1089b91c-63d0-4849-bd05-fd3fef1dde3b req-804521d0-3b39-40a8-ab64-760161a52e1e service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.247317] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.249060] env[62875]: DEBUG nova.network.neutron [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updated VIF entry in instance network info cache for port 910a7aba-2677-4608-af5a-5efee055c3aa. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2067.249060] env[62875]: DEBUG nova.network.neutron [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [{"id": "3f89ecea-c779-49ec-8304-34ae86acec1f", "address": "fa:16:3e:6f:74:65", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.211", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f89ecea-c7", "ovs_interfaceid": "3f89ecea-c779-49ec-8304-34ae86acec1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cf16fb35-456a-450b-8a75-bac02d08f481", "address": "fa:16:3e:44:9c:5e", "network": {"id": "be7b865a-da9e-4284-9313-6199ab2240a3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-308012527", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf16fb35-45", "ovs_interfaceid": "cf16fb35-456a-450b-8a75-bac02d08f481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "910a7aba-2677-4608-af5a-5efee055c3aa", "address": "fa:16:3e:ea:81:ef", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", 
"segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap910a7aba-26", "ovs_interfaceid": "910a7aba-2677-4608-af5a-5efee055c3aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.264052] env[62875]: DEBUG nova.network.neutron [-] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.340350] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180089, 'name': CreateVM_Task, 'duration_secs': 0.607655} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.340480] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2067.341403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.341571] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.342014] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2067.342157] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7b4bcbf-7df0-4199-82e8-6bc1601bf050 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.346829] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2067.346829] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52050f7d-a33f-a5e1-d803-d7e10a801e9b" [ 2067.346829] env[62875]: _type = "Task" [ 2067.346829] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.355153] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52050f7d-a33f-a5e1-d803-d7e10a801e9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.466908] env[62875]: DEBUG oslo_concurrency.lockutils [req-c4ef40a6-28dd-4f91-8444-c5cd6a50a717 req-b6423b08-475d-4e93-ae5b-ad47d346abe6 service nova] Releasing lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.545019] env[62875]: DEBUG nova.scheduler.client.report [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2067.623873] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180091, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.628023] env[62875]: DEBUG nova.network.neutron [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2067.638639] env[62875]: DEBUG oslo_concurrency.lockutils [None req-044b188f-e79c-470f-9adf-b708406973bc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.447s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.746194] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139826} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.746440] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d2557c-7de0-1c8b-39f1-3b5abc0014ea, 'name': SearchDatastore_Task, 'duration_secs': 0.009782} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.746660] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.746843] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2067.747026] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2067.749499] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.749946] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2067.750212] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.750359] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.750542] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2067.751676] env[62875]: DEBUG oslo_concurrency.lockutils [req-7918fc34-e6f5-4810-8934-e322cad82453 req-97b1d93e-fc42-424f-b0d1-bb45509b0062 service nova] Releasing lock "refresh_cache-2106a09b-554e-41dd-aa3a-c190b62d0afc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
2067.752514] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96a8fe21-e436-48dd-9e32-adbd141f10cf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.754011] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea1ea385-5ed8-4732-af59-e91ba4010e1b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.763660] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5315cb-d87d-4a75-b379-9c7dd0afde20 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.778203] env[62875]: INFO nova.compute.manager [-] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Took 1.49 seconds to deallocate network for instance. [ 2067.778516] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2067.778684] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2067.781662] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca1ced9b-1400-48c4-991a-1b8ae6e45c97 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.788203] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2067.788203] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220f436-7ddf-30d9-4df6-c85652eff75b" [ 2067.788203] env[62875]: _type = "Task" [ 2067.788203] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.799603] env[62875]: DEBUG nova.compute.manager [req-1089b91c-63d0-4849-bd05-fd3fef1dde3b req-804521d0-3b39-40a8-ab64-760161a52e1e service nova] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Detach interface failed, port_id=09e96105-f947-4b3d-b097-d53f32948ee2, reason: Instance 305aebbe-f983-4826-b8c0-9854458f7d48 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2067.805408] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220f436-7ddf-30d9-4df6-c85652eff75b, 'name': SearchDatastore_Task, 'duration_secs': 0.012499} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.806179] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75949977-1eb9-487c-9b85-de5ae857c0f9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.811323] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2067.811323] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252f9ed-85f4-1596-6dc7-60b7f64a361e" [ 2067.811323] env[62875]: _type = "Task" [ 2067.811323] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.820947] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252f9ed-85f4-1596-6dc7-60b7f64a361e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.857150] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52050f7d-a33f-a5e1-d803-d7e10a801e9b, 'name': SearchDatastore_Task, 'duration_secs': 0.009486} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.857470] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.857896] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2067.858246] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.858421] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.858614] env[62875]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2067.858885] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b418613-27c5-4771-baa9-5c7265950396 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.867038] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2067.867234] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2067.867997] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14856f2f-7a57-4acd-bfc1-e2f44d413fbb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.873210] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2067.873210] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529ffc50-eb3c-e146-55dd-7079f4046935" [ 2067.873210] env[62875]: _type = "Task" [ 2067.873210] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.883708] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529ffc50-eb3c-e146-55dd-7079f4046935, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.044738] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.045055] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.050081] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.056511] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.185s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.059097] env[62875]: INFO nova.compute.claims [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2068.082475] env[62875]: DEBUG nova.network.neutron [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updating instance_info_cache with network_info: [{"id": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "address": "fa:16:3e:1e:f3:2e", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba73130-21", "ovs_interfaceid": 
"8ba73130-211b-42d3-b2b1-b1b72ee95433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.084402] env[62875]: INFO nova.scheduler.client.report [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Deleted allocations for instance a64253fe-4ba9-4686-810b-a26a4c29631b [ 2068.127927] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180091, 'name': ReconfigVM_Task, 'duration_secs': 1.276529} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.127927] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 2cf54268-5499-49c9-8029-68b3866581d0/2cf54268-5499-49c9-8029-68b3866581d0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2068.127927] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a735b828-69bd-4cf4-8634-a3c2e5091434 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.132973] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2068.132973] env[62875]: value = "task-2180094" [ 2068.132973] env[62875]: _type = "Task" [ 2068.132973] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.142159] env[62875]: DEBUG nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2068.149113] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180094, 'name': Rename_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.258781] env[62875]: DEBUG nova.compute.manager [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Received event network-vif-plugged-8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2068.259193] env[62875]: DEBUG oslo_concurrency.lockutils [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] Acquiring lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.259487] env[62875]: DEBUG oslo_concurrency.lockutils [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.259724] env[62875]: DEBUG oslo_concurrency.lockutils [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.260082] env[62875]: DEBUG nova.compute.manager [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] No waiting events found dispatching network-vif-plugged-8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2068.260227] env[62875]: WARNING nova.compute.manager [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Received unexpected event network-vif-plugged-8ba73130-211b-42d3-b2b1-b1b72ee95433 for instance with vm_state building and task_state spawning. [ 2068.260395] env[62875]: DEBUG nova.compute.manager [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Received event network-changed-8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2068.260602] env[62875]: DEBUG nova.compute.manager [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Refreshing instance network info cache due to event network-changed-8ba73130-211b-42d3-b2b1-b1b72ee95433. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2068.260860] env[62875]: DEBUG oslo_concurrency.lockutils [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] Acquiring lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.323924] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252f9ed-85f4-1596-6dc7-60b7f64a361e, 'name': SearchDatastore_Task, 'duration_secs': 0.011504} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.323924] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.323924] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 9e0aaea6-96cf-494d-9f70-a709a47f9772/9e0aaea6-96cf-494d-9f70-a709a47f9772.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2068.323924] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-095d464e-d2b2-469e-8c90-ce81e8688ef2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.329951] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2068.329951] env[62875]: value = "task-2180095" [ 2068.329951] env[62875]: _type = "Task" [ 2068.329951] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.337979] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.342735] env[62875]: INFO nova.compute.manager [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Took 0.56 seconds to detach 1 volumes for instance. 
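The SearchDatastore_Task, DeleteDatastoreFile_Task and CopyVirtualDisk_Task entries above all funnel through the same poll-until-complete loop (wait_for_task / _poll_task in oslo_vmware/api.py): read the Task managed object's info, log "progress is N%" while it is queued or running, and return or raise once it reaches a terminal state. Below is a minimal sketch of that pattern only; the get_task_info accessor is hypothetical, standing in for the PropertyCollector read of Task.info, and none of the names are oslo.vmware's actual signatures.

    import time

    class TaskError(Exception):
        """Raised when the polled task ends in the 'error' state."""

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # Poll until the task leaves the queued/running states, mirroring the
        # repeated "Task: {...} progress is N%" DEBUG entries in the log above.
        while True:
            info = get_task_info(task_ref)  # hypothetical: reads Task.info
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise TaskError(info.get("error"))
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            time.sleep(interval)

On success the caller also sees the total "duration_secs" value recorded in the "completed successfully" entries, which is simply wall-clock time across these polling iterations.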
[ 2068.385494] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529ffc50-eb3c-e146-55dd-7079f4046935, 'name': SearchDatastore_Task, 'duration_secs': 0.017355} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.387125] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40810c17-2a81-463f-8e3f-cb7d0a728111 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.393959] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2068.393959] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5264ac5b-c6c2-837d-d564-859d6d9f9a50" [ 2068.393959] env[62875]: _type = "Task" [ 2068.393959] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.403565] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5264ac5b-c6c2-837d-d564-859d6d9f9a50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.586041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.586892] env[62875]: DEBUG nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Instance network_info: |[{"id": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "address": "fa:16:3e:1e:f3:2e", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba73130-21", "ovs_interfaceid": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2068.586892] env[62875]: DEBUG oslo_concurrency.lockutils [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] Acquired lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.586892] env[62875]: DEBUG nova.network.neutron [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Refreshing network info cache for port 8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2068.588575] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:f3:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ba73130-211b-42d3-b2b1-b1b72ee95433', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2068.598455] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Creating folder: Project (95d0f81815ea467cbc1c6160e27409fe). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2068.605724] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-243738a7-aa71-435e-8f83-482b7afa17c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.607689] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12159c50-c736-4096-9a05-4d77c646ec09 tempest-InstanceActionsV221TestJSON-450910855 tempest-InstanceActionsV221TestJSON-450910855-project-member] Lock "a64253fe-4ba9-4686-810b-a26a4c29631b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.775s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.619943] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Created folder: Project (95d0f81815ea467cbc1c6160e27409fe) in parent group-v444854. [ 2068.620234] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Creating folder: Instances. Parent ref: group-v444908. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2068.620442] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5dad8b06-e507-4c6a-a7ec-108f970d93c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.631111] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Created folder: Instances in parent group-v444908. [ 2068.631526] env[62875]: DEBUG oslo.service.loopingcall [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.631742] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2068.631962] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4916638-0387-4f94-b014-92f80d1e9698 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.668044] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2068.668044] env[62875]: value = "task-2180098" [ 2068.668044] env[62875]: _type = "Task" [ 2068.668044] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.672827] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180094, 'name': Rename_Task, 'duration_secs': 0.137406} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.673616] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2068.673931] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99d173c1-e96d-41bc-a3c5-dfcda560c326 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.679279] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180098, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.685253] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.689866] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2068.689866] env[62875]: value = "task-2180099" [ 2068.689866] env[62875]: _type = "Task" [ 2068.689866] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.706134] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180099, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.797744] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2068.798075] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2068.798247] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2068.798441] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2068.798885] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 
tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2068.798885] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2068.798983] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2068.799120] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2068.799292] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2068.799525] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2068.800070] env[62875]: DEBUG nova.virt.hardware [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2068.801047] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b0bbe0-22e0-4511-b1bd-c4359a736ceb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.811701] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f582ca-0d17-464b-9c43-c08e141e925f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.826297] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:37:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78c0725e-6d52-4486-acdf-e95c7a1ae020', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2068.837353] 
env[62875]: DEBUG oslo.service.loopingcall [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.837353] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2068.838330] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4b233b3-5388-42e7-95e7-56617e5c7135 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.854493] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.860088] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505362} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.861523] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 9e0aaea6-96cf-494d-9f70-a709a47f9772/9e0aaea6-96cf-494d-9f70-a709a47f9772.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2068.861751] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2068.861985] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2068.861985] env[62875]: value = "task-2180100" [ 2068.861985] env[62875]: _type = "Task" [ 2068.861985] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.862190] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e8d20eb-c9d7-4dfc-a539-a6bb1beba7f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.872779] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180100, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.874417] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2068.874417] env[62875]: value = "task-2180101" [ 2068.874417] env[62875]: _type = "Task" [ 2068.874417] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.883418] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180101, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.904895] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5264ac5b-c6c2-837d-d564-859d6d9f9a50, 'name': SearchDatastore_Task, 'duration_secs': 0.01308} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.907925] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.907925] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2106a09b-554e-41dd-aa3a-c190b62d0afc/2106a09b-554e-41dd-aa3a-c190b62d0afc.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2068.907925] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eaebb53d-1f32-4018-b009-e2ceb2383e80 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.914304] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2068.914304] env[62875]: value = "task-2180102" [ 2068.914304] env[62875]: _type = "Task" [ 2068.914304] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.921463] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.965777] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "816e0ecb-6476-49bb-9fea-a01067f25b51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.965777] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.965777] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "816e0ecb-6476-49bb-9fea-a01067f25b51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.965937] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.966027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.968529] env[62875]: INFO nova.compute.manager [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Terminating instance [ 2069.175892] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180098, 'name': CreateVM_Task, 'duration_secs': 0.487836} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.178633] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2069.179746] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.179859] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.180382] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2069.180630] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9b1f920-66af-4aa4-b0eb-eb84441840ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.186262] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2069.186262] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52512d3b-2143-2a2a-e847-19fe90319430" [ 2069.186262] env[62875]: _type = "Task" [ 2069.186262] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.202541] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52512d3b-2143-2a2a-e847-19fe90319430, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.210232] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180099, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.383311] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180100, 'name': CreateVM_Task, 'duration_secs': 0.442951} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.383311] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2069.386541] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.394865] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180101, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070474} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.395370] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2069.396362] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096e5ea7-d7a8-4485-93f8-822e38271f79 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.425200] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 9e0aaea6-96cf-494d-9f70-a709a47f9772/9e0aaea6-96cf-494d-9f70-a709a47f9772.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2069.431819] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1374897a-06f8-451a-a6be-a81662f84817 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.447090] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeed969-d231-4d54-8d5f-116d7b8cd5b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.459236] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180102, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.461995] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2069.461995] env[62875]: value = "task-2180103" [ 2069.461995] env[62875]: _type = "Task" [ 2069.461995] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.463012] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc5fe91-5ce1-4dc0-887f-824a7919728b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.475104] env[62875]: DEBUG nova.compute.manager [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2069.475367] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2069.475630] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180103, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.501740] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73b6575-371d-4417-b4e6-34230c6bee4c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.508144] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c448e8a-6231-4cb6-b7c0-913f96e14752 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.522983] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2069.522983] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5473f27a-de7e-43c0-a9bb-30b6d7c5ab10 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.524505] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16a5df2-59fc-421e-9656-e4f0223ddb56 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.540799] env[62875]: DEBUG nova.compute.provider_tree [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2069.546024] env[62875]: DEBUG oslo_vmware.api [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2069.546024] env[62875]: value = "task-2180104" [ 2069.546024] env[62875]: _type = "Task" [ 2069.546024] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.553209] env[62875]: DEBUG oslo_vmware.api [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.581297] env[62875]: DEBUG nova.network.neutron [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updated VIF entry in instance network info cache for port 8ba73130-211b-42d3-b2b1-b1b72ee95433. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2069.581663] env[62875]: DEBUG nova.network.neutron [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updating instance_info_cache with network_info: [{"id": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "address": "fa:16:3e:1e:f3:2e", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba73130-21", "ovs_interfaceid": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.705393] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52512d3b-2143-2a2a-e847-19fe90319430, 'name': SearchDatastore_Task, 'duration_secs': 0.020027} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.709088] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.709581] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2069.709927] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.710139] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.710337] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2069.710650] env[62875]: DEBUG oslo_vmware.api [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180099, 'name': PowerOnVM_Task, 'duration_secs': 0.614191} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.711241] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.711593] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2069.712120] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-901bdf24-ef67-434d-ba94-9fb3ae7ffb12 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.714751] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2069.715082] env[62875]: INFO nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Took 8.33 seconds to spawn the instance on the hypervisor. [ 2069.715196] env[62875]: DEBUG nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2069.715672] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee39a6a7-7e0d-4985-99f1-803fe5e75ddf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.718768] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c13df01-b933-4fdc-8406-4b2bc286528d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.725312] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2069.725312] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523b3290-0293-dad7-f5c3-31e52da7786d" [ 2069.725312] env[62875]: _type = "Task" [ 2069.725312] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.734165] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2069.734359] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2069.735306] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c6f2310-86ef-4432-b5fe-b9e2ab27202e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.745575] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523b3290-0293-dad7-f5c3-31e52da7786d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.746667] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2069.746667] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5294dbfa-9779-aa0e-1093-e261760d0560" [ 2069.746667] env[62875]: _type = "Task" [ 2069.746667] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.754369] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5294dbfa-9779-aa0e-1093-e261760d0560, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.926901] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653975} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.927249] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2106a09b-554e-41dd-aa3a-c190b62d0afc/2106a09b-554e-41dd-aa3a-c190b62d0afc.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2069.929023] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2069.929023] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f02a4b9-daa6-4478-bb9b-dd00f0652161 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.936623] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2069.936623] env[62875]: value = "task-2180105" [ 2069.936623] env[62875]: _type = "Task" [ 2069.936623] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.951822] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180105, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.977507] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180103, 'name': ReconfigVM_Task, 'duration_secs': 0.443693} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.977838] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 9e0aaea6-96cf-494d-9f70-a709a47f9772/9e0aaea6-96cf-494d-9f70-a709a47f9772.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2069.979052] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f057c3c0-4a99-4430-acaa-545122a0db77 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.986488] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2069.986488] env[62875]: value = "task-2180106" [ 2069.986488] env[62875]: _type = "Task" [ 2069.986488] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.997624] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180106, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.046253] env[62875]: DEBUG nova.scheduler.client.report [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2070.067888] env[62875]: DEBUG oslo_vmware.api [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180104, 'name': PowerOffVM_Task, 'duration_secs': 0.190886} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.068461] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2070.068775] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2070.069177] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-698f77ad-c262-436a-bafe-21c9b52bce68 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.087535] env[62875]: DEBUG oslo_concurrency.lockutils [req-d3debdd6-6178-4a6a-aa06-f7865280ae5c req-e4ee7702-2210-4f3e-8b87-95476cf9e684 service nova] Releasing lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.147504] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2070.147782] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2070.147965] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleting the datastore file [datastore2] 816e0ecb-6476-49bb-9fea-a01067f25b51 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2070.148236] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba4cac37-7ac9-408f-9dce-4495789c6bbb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.155060] env[62875]: DEBUG oslo_vmware.api [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2070.155060] env[62875]: value = "task-2180108" [ 2070.155060] env[62875]: _type = "Task" [ 2070.155060] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.163539] env[62875]: DEBUG oslo_vmware.api [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180108, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.241705] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523b3290-0293-dad7-f5c3-31e52da7786d, 'name': SearchDatastore_Task, 'duration_secs': 0.028368} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.242042] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.242290] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2070.242509] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.245811] env[62875]: INFO nova.compute.manager [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Took 35.21 seconds to build instance. [ 2070.261074] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5294dbfa-9779-aa0e-1093-e261760d0560, 'name': SearchDatastore_Task, 'duration_secs': 0.019205} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.266035] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0c13e30-6f0d-4068-9dd0-5c62a276d02d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.269904] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2070.269904] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5229ccd4-baed-c108-2f13-d86f2bdb0f93" [ 2070.269904] env[62875]: _type = "Task" [ 2070.269904] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.278971] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5229ccd4-baed-c108-2f13-d86f2bdb0f93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.448801] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065924} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.449195] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2070.452016] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71932044-0081-4ad0-9572-0e6c5409860f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.485950] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 2106a09b-554e-41dd-aa3a-c190b62d0afc/2106a09b-554e-41dd-aa3a-c190b62d0afc.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2070.486341] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2ac6f62-45c0-44a2-971a-6ac4b629ce36 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.513335] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180106, 'name': Rename_Task, 'duration_secs': 0.135587} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.514788] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2070.515219] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2070.515219] env[62875]: value = "task-2180109" [ 2070.515219] env[62875]: _type = "Task" [ 2070.515219] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.515596] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d85c830a-5735-4f2a-a5b7-b94a437014b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.525901] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180109, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.527150] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2070.527150] env[62875]: value = "task-2180110" [ 2070.527150] env[62875]: _type = "Task" [ 2070.527150] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.537525] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180110, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.564127] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.564127] env[62875]: DEBUG nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2070.566083] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.737s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2070.567982] env[62875]: INFO nova.compute.claims [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2070.667765] env[62875]: DEBUG oslo_vmware.api [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180108, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.749872] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f9fba735-c3aa-4dd6-89e5-8eee5fb2666c tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "2cf54268-5499-49c9-8029-68b3866581d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.771s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.781704] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5229ccd4-baed-c108-2f13-d86f2bdb0f93, 'name': SearchDatastore_Task, 'duration_secs': 0.049552} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.781961] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.782335] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 67ac6916-04f3-4eb8-b7da-37a5b28b50d9/67ac6916-04f3-4eb8-b7da-37a5b28b50d9.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2070.782546] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2070.782730] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2070.782937] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d633a63-2439-40c9-a40e-56033ce216c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.784924] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-977b2f79-890f-4e03-bbdd-eb42867882e6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.791664] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 
tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2070.791664] env[62875]: value = "task-2180111" [ 2070.791664] env[62875]: _type = "Task" [ 2070.791664] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.795768] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2070.795959] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2070.797838] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd3d3bc6-4222-4506-8377-7b8006679392 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.804400] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180111, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.807380] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2070.807380] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522aaa0a-9902-387b-6927-0f5085fee335" [ 2070.807380] env[62875]: _type = "Task" [ 2070.807380] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.817290] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522aaa0a-9902-387b-6927-0f5085fee335, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.031885] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180109, 'name': ReconfigVM_Task, 'duration_secs': 0.39811} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.035472] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 2106a09b-554e-41dd-aa3a-c190b62d0afc/2106a09b-554e-41dd-aa3a-c190b62d0afc.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2071.036476] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7d75c69-def4-4f6b-bc86-fd697adbf392 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.045142] env[62875]: DEBUG oslo_vmware.api [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180110, 'name': PowerOnVM_Task, 'duration_secs': 0.461391} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.046861] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2071.047181] env[62875]: INFO nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Took 7.28 seconds to spawn the instance on the hypervisor. [ 2071.047376] env[62875]: DEBUG nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2071.047809] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2071.047809] env[62875]: value = "task-2180112" [ 2071.047809] env[62875]: _type = "Task" [ 2071.047809] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.048636] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d85bb7-7409-458b-a6e1-fa47893eb7de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.066199] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180112, 'name': Rename_Task} progress is 10%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.073145] env[62875]: DEBUG nova.compute.utils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2071.078125] env[62875]: DEBUG nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2071.078125] env[62875]: DEBUG nova.network.neutron [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2071.156682] env[62875]: DEBUG nova.policy [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '254af03bbd2049588c93988d3f5c62b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b5b23e271c6472f893db30de9916fbf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2071.171023] env[62875]: DEBUG oslo_vmware.api [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.613592} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.171356] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2071.171553] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2071.172500] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2071.172500] env[62875]: INFO nova.compute.manager [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Took 1.70 seconds to destroy the instance on the hypervisor. [ 2071.172500] env[62875]: DEBUG oslo.service.loopingcall [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2071.172621] env[62875]: DEBUG nova.compute.manager [-] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2071.172649] env[62875]: DEBUG nova.network.neutron [-] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2071.252863] env[62875]: DEBUG nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2071.303797] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180111, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.318666] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522aaa0a-9902-387b-6927-0f5085fee335, 'name': SearchDatastore_Task, 'duration_secs': 0.019963} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.321498] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f45f712-6466-4c25-a721-5ea4fa1ac621 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.327985] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2071.327985] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5270eadb-a8be-03e3-0674-ad9e251d02ad" [ 2071.327985] env[62875]: _type = "Task" [ 2071.327985] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.337851] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5270eadb-a8be-03e3-0674-ad9e251d02ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.564312] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180112, 'name': Rename_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.582148] env[62875]: DEBUG nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2071.589438] env[62875]: INFO nova.compute.manager [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Took 33.91 seconds to build instance. 
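[editor's note] The recurring "Waiting for the task … progress is N% … completed successfully" records above are produced by oslo.vmware's task poller (wait_for_task/_poll_task), which repeatedly queries a vSphere task handle until it reaches a terminal state. A minimal stand-alone sketch of that polling pattern, using only the standard library — FakeTask and wait_for_task here are hypothetical stand-ins for illustration, not the oslo.vmware API:

import time

class FakeTask:
    """Hypothetical stand-in for a vSphere task handle that reports progress."""
    def __init__(self, step):
        self._progress = 0
        self._step = step

    def poll(self):
        # Each poll advances the fake task; a real handle would query vCenter.
        self._progress = min(100, self._progress + self._step)
        return self._progress

def wait_for_task(task, interval=0.5):
    # Poll until the task reports completion, logging progress the way the
    # _poll_task DEBUG lines above do.
    while True:
        progress = task.poll()
        print(f"Task progress is {progress}%.")
        if progress >= 100:
            print("Task completed successfully.")
            return
        time.sleep(interval)

wait_for_task(FakeTask(step=40), interval=0.1)

Under this sketch's assumptions, three polls yield 40%, 80%, 100% — the same shape as the CopyVirtualDisk_Task trail above (0% → 51% → completed).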
[ 2071.632735] env[62875]: DEBUG nova.compute.manager [req-704e5265-aa83-4146-b1b4-008b4446d8c9 req-1184d3e1-3e3c-4b54-803a-c02597c489f9 service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Received event network-vif-deleted-19bc9bc7-4c85-4add-a788-b2b0b2376185 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2071.633064] env[62875]: INFO nova.compute.manager [req-704e5265-aa83-4146-b1b4-008b4446d8c9 req-1184d3e1-3e3c-4b54-803a-c02597c489f9 service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Neutron deleted interface 19bc9bc7-4c85-4add-a788-b2b0b2376185; detaching it from the instance and deleting it from the info cache [ 2071.633182] env[62875]: DEBUG nova.network.neutron [req-704e5265-aa83-4146-b1b4-008b4446d8c9 req-1184d3e1-3e3c-4b54-803a-c02597c489f9 service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.782183] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.791850] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd6a42ed-a714-4a74-80e7-58716cd3ce83 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "interface-2cf54268-5499-49c9-8029-68b3866581d0-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.792147] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd6a42ed-a714-4a74-80e7-58716cd3ce83 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "interface-2cf54268-5499-49c9-8029-68b3866581d0-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.792492] env[62875]: DEBUG nova.objects.instance [None req-bd6a42ed-a714-4a74-80e7-58716cd3ce83 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lazy-loading 'flavor' on Instance uuid 2cf54268-5499-49c9-8029-68b3866581d0 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2071.815057] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715742} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.815057] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 67ac6916-04f3-4eb8-b7da-37a5b28b50d9/67ac6916-04f3-4eb8-b7da-37a5b28b50d9.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2071.815217] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2071.815422] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65eae175-d7b3-4409-a3e7-e8e2d001b369 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.822783] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2071.822783] env[62875]: value = "task-2180113" [ 2071.822783] env[62875]: _type = "Task" [ 2071.822783] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.838691] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180113, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.845736] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5270eadb-a8be-03e3-0674-ad9e251d02ad, 'name': SearchDatastore_Task, 'duration_secs': 0.057082} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.849084] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2071.849364] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2071.850306] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3d5c113-7e49-47c9-a4f6-5c28a0e5474d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.856996] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2071.856996] env[62875]: value = "task-2180114" [ 2071.856996] env[62875]: _type = "Task" [ 2071.856996] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.867650] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180114, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.916703] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48e1af9-2ac1-46f4-bace-3cdc0809b940 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.924700] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99f211a-298b-4bc4-9bf0-9d073338d1e7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.965275] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3fbc92-c326-446f-bdc8-e774fc847a62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.974522] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbc9f6a-6162-43d6-b474-4854ebd53c8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.989173] env[62875]: DEBUG nova.compute.provider_tree [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2072.026786] env[62875]: DEBUG nova.network.neutron [-] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2072.064367] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180112, 'name': Rename_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.087982] env[62875]: DEBUG nova.network.neutron [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Successfully created port: 4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2072.093803] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1fcafee8-df85-4817-b325-a11c771e331c tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 88.357s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.135735] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99954cb8-bfef-4db0-99df-7f13957708b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.146579] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a42298-6027-4731-b726-5fd72735303b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.179114] env[62875]: DEBUG nova.compute.manager [req-704e5265-aa83-4146-b1b4-008b4446d8c9 req-1184d3e1-3e3c-4b54-803a-c02597c489f9 service nova] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Detach interface failed, port_id=19bc9bc7-4c85-4add-a788-b2b0b2376185, reason: Instance 816e0ecb-6476-49bb-9fea-a01067f25b51 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2072.307918] env[62875]: DEBUG nova.objects.instance [None req-bd6a42ed-a714-4a74-80e7-58716cd3ce83 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lazy-loading 'pci_requests' on Instance uuid 2cf54268-5499-49c9-8029-68b3866581d0 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2072.337293] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067831} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.337636] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2072.338501] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd82d89-6910-4043-95a9-1f31eea5ff30 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.363573] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 67ac6916-04f3-4eb8-b7da-37a5b28b50d9/67ac6916-04f3-4eb8-b7da-37a5b28b50d9.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2072.364119] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bfa93f3-7e9c-46b5-9020-b78dc268ad4d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.393276] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180114, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.395078] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2072.395078] env[62875]: value = "task-2180115" [ 2072.395078] env[62875]: _type = "Task" [ 2072.395078] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.405792] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180115, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.492424] env[62875]: DEBUG nova.scheduler.client.report [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2072.529188] env[62875]: INFO nova.compute.manager [-] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Took 1.36 seconds to deallocate network for instance. [ 2072.566793] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180112, 'name': Rename_Task, 'duration_secs': 1.166614} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.567943] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2072.568041] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37c6edcf-639a-413c-b41f-4cc7a0164d53 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.575484] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2072.575484] env[62875]: value = "task-2180116" [ 2072.575484] env[62875]: _type = "Task" [ 2072.575484] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.584731] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.596411] env[62875]: DEBUG nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2072.598950] env[62875]: DEBUG nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2072.631310] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2072.631634] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2072.631845] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2072.632202] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2072.632422] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2072.632605] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2072.632918] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2072.633689] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2072.634124] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2072.634356] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2072.634614] env[62875]: DEBUG nova.virt.hardware [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2072.635826] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f1392e-b59b-474a-84bf-24cf749d0c9e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.647695] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96c1e2f-2efb-442b-b854-17630646058f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.810953] env[62875]: DEBUG nova.objects.base [None req-bd6a42ed-a714-4a74-80e7-58716cd3ce83 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Object Instance<2cf54268-5499-49c9-8029-68b3866581d0> lazy-loaded attributes: flavor,pci_requests {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2072.811483] env[62875]: DEBUG nova.network.neutron [None req-bd6a42ed-a714-4a74-80e7-58716cd3ce83 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2072.876706] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180114, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666967} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.877038] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2072.878510] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2072.879499] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-219df4e7-815c-4996-abdb-c40122a0a4a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.888332] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2072.888332] env[62875]: value = "task-2180117" [ 2072.888332] env[62875]: _type = "Task" [ 2072.888332] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.897336] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180117, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.901324] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd6a42ed-a714-4a74-80e7-58716cd3ce83 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "interface-2cf54268-5499-49c9-8029-68b3866581d0-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 1.109s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.911654] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180115, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.998052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.999241] env[62875]: DEBUG nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2073.001295] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.816s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2073.004472] env[62875]: INFO nova.compute.claims [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2073.037243] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.086068] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180116, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.125407] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.398163] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180117, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076971} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.401176] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2073.401906] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c491d37-bc4b-470f-9eeb-6eec6911d680 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.410964] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180115, 'name': ReconfigVM_Task, 'duration_secs': 0.933301} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.421031] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 67ac6916-04f3-4eb8-b7da-37a5b28b50d9/67ac6916-04f3-4eb8-b7da-37a5b28b50d9.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2073.430559] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2073.431428] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c84091db-2d74-45cb-9c7f-d7ffc904e97d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.433914] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b11abe4e-f939-4469-882c-d9cd8a457e10 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.454893] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2073.454893] env[62875]: value = "task-2180119" [ 2073.454893] env[62875]: _type = "Task" [ 2073.454893] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.456151] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2073.456151] env[62875]: value = "task-2180118" [ 2073.456151] env[62875]: _type = "Task" [ 2073.456151] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.468335] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180119, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.471615] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180118, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.508553] env[62875]: DEBUG nova.compute.utils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2073.512804] env[62875]: DEBUG nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2073.513011] env[62875]: DEBUG nova.network.neutron [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2073.566129] env[62875]: DEBUG nova.policy [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c58a186e6eb4453381f8abf978fe79cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c10385cbf8b248d48c7405dc542bde9d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2073.587176] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180116, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.970588] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180119, 'name': ReconfigVM_Task, 'duration_secs': 0.50139} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.973628] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 8361611a-ad16-43ef-94e0-f2e7e9851682/8361611a-ad16-43ef-94e0-f2e7e9851682.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2073.974318] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180118, 'name': Rename_Task, 'duration_secs': 0.251359} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.974525] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7fd02931-ab55-48a9-835a-62f4014754f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.976262] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2073.976438] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b22cd027-d2c5-4a48-953b-5314eb8a9649 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.983098] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2073.983098] env[62875]: value = "task-2180121" [ 2073.983098] env[62875]: _type = "Task" [ 2073.983098] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.984372] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2073.984372] env[62875]: value = "task-2180120" [ 2073.984372] env[62875]: _type = "Task" [ 2073.984372] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.995092] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180121, 'name': Rename_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.997913] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.013729] env[62875]: DEBUG nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2074.017060] env[62875]: DEBUG nova.network.neutron [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Successfully created port: 7632ec90-6680-4447-b3c3-b39afcc641b3 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2074.093010] env[62875]: DEBUG oslo_vmware.api [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180116, 'name': PowerOnVM_Task, 'duration_secs': 1.114987} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.093316] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2074.093518] env[62875]: INFO nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Took 17.56 seconds to spawn the instance on the hypervisor. 
[ 2074.093700] env[62875]: DEBUG nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2074.094583] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291de460-5a93-4151-be99-d39610165811 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.222121] env[62875]: DEBUG nova.compute.manager [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Received event network-changed-c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2074.222372] env[62875]: DEBUG nova.compute.manager [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Refreshing instance network info cache due to event network-changed-c4a5613b-5345-49d7-b791-29a0dbe58ed2. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2074.222602] env[62875]: DEBUG oslo_concurrency.lockutils [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] Acquiring lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.222842] env[62875]: DEBUG oslo_concurrency.lockutils [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] Acquired lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2074.222913] env[62875]: DEBUG nova.network.neutron [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Refreshing network info cache for port c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2074.337506] env[62875]: DEBUG nova.network.neutron [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Successfully updated port: 4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2074.360026] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9667f8be-0560-47c6-967d-386e4b961630 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.368463] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fdd194-9991-44cc-a19d-87a999e19e0c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.403979] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01747a39-c5e8-4656-8412-19596540bee4 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.413496] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8562932f-2d0c-45b6-bc36-909b22fd1887 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.428735] env[62875]: DEBUG nova.compute.provider_tree [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2074.504475] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180121, 'name': Rename_Task, 'duration_secs': 0.311819} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.504737] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180120, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.505174] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2074.505539] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba581a70-bf98-4fbe-a3fd-459c308761fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.512853] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2074.512853] env[62875]: value = "task-2180122" [ 2074.512853] env[62875]: _type = "Task" [ 2074.512853] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.520960] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180122, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.570300] env[62875]: DEBUG nova.compute.manager [req-cfee3af3-dd16-4388-8474-5669f8a19ef5 req-f6c19a75-2929-4f99-a1de-fe36c2223c0c service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Received event network-vif-plugged-4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2074.570507] env[62875]: DEBUG oslo_concurrency.lockutils [req-cfee3af3-dd16-4388-8474-5669f8a19ef5 req-f6c19a75-2929-4f99-a1de-fe36c2223c0c service nova] Acquiring lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.570767] env[62875]: DEBUG oslo_concurrency.lockutils [req-cfee3af3-dd16-4388-8474-5669f8a19ef5 req-f6c19a75-2929-4f99-a1de-fe36c2223c0c service nova] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.570971] env[62875]: DEBUG oslo_concurrency.lockutils [req-cfee3af3-dd16-4388-8474-5669f8a19ef5 req-f6c19a75-2929-4f99-a1de-fe36c2223c0c service nova] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.571072] env[62875]: DEBUG nova.compute.manager [req-cfee3af3-dd16-4388-8474-5669f8a19ef5 req-f6c19a75-2929-4f99-a1de-fe36c2223c0c service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] No waiting events found dispatching network-vif-plugged-4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2074.571240] env[62875]: WARNING nova.compute.manager [req-cfee3af3-dd16-4388-8474-5669f8a19ef5 req-f6c19a75-2929-4f99-a1de-fe36c2223c0c service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Received unexpected event network-vif-plugged-4d852812-d3cb-456e-992b-fe1bb2ebc36e for instance with vm_state building and task_state spawning. [ 2074.617237] env[62875]: INFO nova.compute.manager [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Took 41.62 seconds to build instance. 
[ 2074.840227] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.840549] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquired lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2074.840745] env[62875]: DEBUG nova.network.neutron [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2074.934096] env[62875]: DEBUG nova.scheduler.client.report [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2074.999385] env[62875]: DEBUG oslo_vmware.api [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180120, 'name': PowerOnVM_Task, 'duration_secs': 0.951639} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.999848] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2075.000037] env[62875]: INFO nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Took 8.83 seconds to spawn the instance on the hypervisor. 
[ 2075.000134] env[62875]: DEBUG nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2075.000881] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e396c0-61ed-4d2a-be19-2c2246929f78 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.034383] env[62875]: DEBUG nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2075.037263] env[62875]: DEBUG oslo_vmware.api [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180122, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.077151] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2075.077533] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2075.077533] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2075.077781] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2075.077930] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 
tempest-ServerPasswordTestJSON-1762049314-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2075.078036] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2075.078215] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2075.078499] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2075.078586] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2075.078683] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2075.078861] env[62875]: DEBUG nova.virt.hardware [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2075.079891] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57afcbf0-9896-46c1-af0b-ccfc8977fe4e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.088942] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36cc7a7-d44a-432f-a4d0-b1a5e52264f4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.123436] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e246aea0-badc-47b8-b3df-8a3cad3be42f tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.830s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.287851] env[62875]: DEBUG nova.network.neutron [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updated VIF entry in 
instance network info cache for port c4a5613b-5345-49d7-b791-29a0dbe58ed2. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2075.288279] env[62875]: DEBUG nova.network.neutron [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updating instance_info_cache with network_info: [{"id": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "address": "fa:16:3e:8b:f8:16", "network": {"id": "86c8f999-67e2-4985-82bb-3f9c44f0fbb8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1235728423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48cead7352349dbab0d47c19e048eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a5613b-53", "ovs_interfaceid": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2075.426807] env[62875]: DEBUG nova.network.neutron [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2075.439698] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.440337] env[62875]: DEBUG nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2075.446474] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.175s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.446718] env[62875]: DEBUG nova.objects.instance [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lazy-loading 'resources' on Instance uuid c1e107cd-5c03-405f-bdae-3281dc4844d5 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2075.494363] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "2cf54268-5499-49c9-8029-68b3866581d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.494797] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "2cf54268-5499-49c9-8029-68b3866581d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.494797] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "2cf54268-5499-49c9-8029-68b3866581d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.494957] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "2cf54268-5499-49c9-8029-68b3866581d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.495773] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "2cf54268-5499-49c9-8029-68b3866581d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.499577] env[62875]: INFO nova.compute.manager [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Terminating instance [ 2075.529483] env[62875]: DEBUG oslo_vmware.api 
[None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180122, 'name': PowerOnVM_Task, 'duration_secs': 0.611903} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.529483] env[62875]: INFO nova.compute.manager [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Took 37.44 seconds to build instance. [ 2075.530351] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2075.530556] env[62875]: DEBUG nova.compute.manager [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2075.531391] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8caacfa-21a4-4064-ad90-a8adb882500e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.659158] env[62875]: DEBUG nova.network.neutron [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Successfully updated port: 7632ec90-6680-4447-b3c3-b39afcc641b3 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2075.792839] env[62875]: DEBUG oslo_concurrency.lockutils [req-ce7d1ddd-6534-453d-83d0-1355e120e421 req-a9e1e85f-4ba4-451c-ae23-e6f1cee207ab service nova] Releasing lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.834356] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "2106a09b-554e-41dd-aa3a-c190b62d0afc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.834630] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.834853] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.835056] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.835233] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.837344] env[62875]: INFO nova.compute.manager [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Terminating instance [ 2075.933628] env[62875]: DEBUG nova.network.neutron [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Updating instance_info_cache with network_info: [{"id": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "address": "fa:16:3e:50:e2:e2", "network": {"id": "3df89f88-8add-4870-a7b8-609ae5e1e872", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1644883682-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b5b23e271c6472f893db30de9916fbf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d852812-d3", "ovs_interfaceid": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2075.952326] env[62875]: DEBUG nova.compute.utils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2075.955442] env[62875]: DEBUG nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2075.955728] env[62875]: DEBUG nova.network.neutron [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2076.006017] env[62875]: DEBUG nova.compute.manager [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2076.006017] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2076.006017] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf78a1df-db0d-48a7-80c6-b23dc0708f0d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.010463] env[62875]: DEBUG nova.policy [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e79993abf5eb47cc8449e3468d3cdd4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bef7d358bb2746efb448dbf759cac58c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2076.021018] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2076.021018] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ad1b2ad-a762-4f05-ae41-6db177ea23a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.028776] env[62875]: DEBUG oslo_vmware.api [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2076.028776] env[62875]: value = "task-2180123" [ 2076.028776] env[62875]: _type = "Task" [ 2076.028776] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.041125] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1769da68-0329-4363-a882-a31b6fd152c5 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.974s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.041125] env[62875]: DEBUG oslo_vmware.api [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.053673] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.162023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "refresh_cache-c217e435-c5d8-406b-99ee-ec71580fb344" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.162181] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquired lock "refresh_cache-c217e435-c5d8-406b-99ee-ec71580fb344" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.162905] env[62875]: DEBUG nova.network.neutron [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2076.258359] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41017c08-e131-4b27-8025-fdc96796b637 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.267676] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e0f9f2-cd30-474c-8db7-dc867f8088fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.306178] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38f27e2-f732-4077-ac0a-db73d5faeea2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.314437] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9acd17d-d6d6-4047-a9d0-539ff41cec4b {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.332334] env[62875]: DEBUG nova.compute.provider_tree [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2076.341864] env[62875]: DEBUG nova.compute.manager [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2076.342671] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2076.342960] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638e3c84-c6ed-4ac1-9c6f-3f10e927580b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.351529] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2076.351777] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de1e5daf-decc-4343-9980-f6b2febf6a12 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.359879] env[62875]: DEBUG oslo_vmware.api [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2076.359879] env[62875]: value = "task-2180124" [ 2076.359879] env[62875]: _type = "Task" [ 2076.359879] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.369248] env[62875]: DEBUG oslo_vmware.api [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180124, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.436986] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Releasing lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2076.437435] env[62875]: DEBUG nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Instance network_info: |[{"id": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "address": "fa:16:3e:50:e2:e2", "network": {"id": "3df89f88-8add-4870-a7b8-609ae5e1e872", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1644883682-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b5b23e271c6472f893db30de9916fbf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d852812-d3", "ovs_interfaceid": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2076.437922] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:e2:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d852812-d3cb-456e-992b-fe1bb2ebc36e', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2076.445508] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Creating folder: Project (2b5b23e271c6472f893db30de9916fbf). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2076.445779] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44fcfe82-d2ae-46c3-a0d9-1d221597bc47 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.456402] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Created folder: Project (2b5b23e271c6472f893db30de9916fbf) in parent group-v444854. [ 2076.456636] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Creating folder: Instances. Parent ref: group-v444912. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2076.456886] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acbfc339-80ea-4bd7-b421-13c9ded1f9ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.458830] env[62875]: DEBUG nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2076.468592] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Created folder: Instances in parent group-v444912. [ 2076.468855] env[62875]: DEBUG oslo.service.loopingcall [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2076.469062] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2076.469268] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5cc3f4a8-b8a7-4adc-adc9-3b27477bff1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.490202] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2076.490202] env[62875]: value = "task-2180127" [ 2076.490202] env[62875]: _type = "Task" [ 2076.490202] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.498470] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180127, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.540637] env[62875]: DEBUG oslo_vmware.api [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180123, 'name': PowerOffVM_Task, 'duration_secs': 0.284793} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.540824] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2076.541019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2076.541304] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2f1848d-1252-4791-8939-0ecc3f19474a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.607744] env[62875]: DEBUG nova.network.neutron [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Successfully created port: a7a79e94-603f-457d-a72b-08c0228a924b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2076.654704] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2076.654942] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2076.655301] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Deleting the datastore file [datastore2] 2cf54268-5499-49c9-8029-68b3866581d0 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2076.655604] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d14e8020-ca24-40dc-b0b4-b32e53e842eb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.662023] env[62875]: DEBUG oslo_vmware.api [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for the task: (returnval){ [ 2076.662023] env[62875]: value = 
"task-2180129" [ 2076.662023] env[62875]: _type = "Task" [ 2076.662023] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.674068] env[62875]: DEBUG oslo_vmware.api [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.714868] env[62875]: DEBUG nova.network.neutron [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2076.826175] env[62875]: DEBUG nova.compute.manager [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Received event network-vif-plugged-7632ec90-6680-4447-b3c3-b39afcc641b3 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2076.826391] env[62875]: DEBUG oslo_concurrency.lockutils [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] Acquiring lock "c217e435-c5d8-406b-99ee-ec71580fb344-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.826597] env[62875]: DEBUG oslo_concurrency.lockutils [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] Lock "c217e435-c5d8-406b-99ee-ec71580fb344-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.827102] env[62875]: DEBUG oslo_concurrency.lockutils [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] Lock "c217e435-c5d8-406b-99ee-ec71580fb344-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.827102] env[62875]: DEBUG nova.compute.manager [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] No waiting events found dispatching network-vif-plugged-7632ec90-6680-4447-b3c3-b39afcc641b3 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2076.827252] env[62875]: WARNING nova.compute.manager [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Received unexpected event network-vif-plugged-7632ec90-6680-4447-b3c3-b39afcc641b3 for instance with vm_state building and task_state spawning. 
[ 2076.827434] env[62875]: DEBUG nova.compute.manager [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Received event network-changed-7632ec90-6680-4447-b3c3-b39afcc641b3 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2076.828133] env[62875]: DEBUG nova.compute.manager [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Refreshing instance network info cache due to event network-changed-7632ec90-6680-4447-b3c3-b39afcc641b3. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2076.828493] env[62875]: DEBUG oslo_concurrency.lockutils [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] Acquiring lock "refresh_cache-c217e435-c5d8-406b-99ee-ec71580fb344" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.837902] env[62875]: DEBUG nova.scheduler.client.report [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2076.879739] env[62875]: DEBUG oslo_vmware.api [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180124, 'name': PowerOffVM_Task, 'duration_secs': 0.216993} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.880120] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2076.880540] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2076.880816] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0cd27c79-13e1-4bb2-8729-bb101e3ca1d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.891707] env[62875]: DEBUG nova.network.neutron [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Updating instance_info_cache with network_info: [{"id": "7632ec90-6680-4447-b3c3-b39afcc641b3", "address": "fa:16:3e:63:44:a0", "network": {"id": "fba17d29-3196-4ae4-967c-20996b319d81", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2080049919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c10385cbf8b248d48c7405dc542bde9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7632ec90-66", "ovs_interfaceid": "7632ec90-6680-4447-b3c3-b39afcc641b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2076.992239] env[62875]: DEBUG nova.compute.manager [req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Received event network-changed-4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2076.992633] env[62875]: DEBUG nova.compute.manager [req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Refreshing instance network info cache due to event network-changed-4d852812-d3cb-456e-992b-fe1bb2ebc36e. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2076.992992] env[62875]: DEBUG oslo_concurrency.lockutils [req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] Acquiring lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.993296] env[62875]: DEBUG oslo_concurrency.lockutils [req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] Acquired lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.993629] env[62875]: DEBUG nova.network.neutron [req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Refreshing network info cache for port 4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2077.009765] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180127, 'name': CreateVM_Task, 'duration_secs': 0.371341} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.010841] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2077.011556] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.011714] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.012052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2077.012616] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-288b6edf-70d1-4177-9500-aa687fc50529 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.018093] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2077.018093] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528999df-6d95-d087-83ea-fffda085ec5e" [ 2077.018093] env[62875]: _type = "Task" [ 2077.018093] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.026329] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528999df-6d95-d087-83ea-fffda085ec5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.172732] env[62875]: DEBUG oslo_vmware.api [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Task: {'id': task-2180129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155888} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.173228] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2077.173568] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2077.176327] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2077.176327] env[62875]: INFO nova.compute.manager [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2077.176327] env[62875]: DEBUG oslo.service.loopingcall [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.176327] env[62875]: DEBUG nova.compute.manager [-] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2077.176327] env[62875]: DEBUG nova.network.neutron [-] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2077.228147] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2077.228147] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2077.228147] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Deleting the datastore file [datastore2] 2106a09b-554e-41dd-aa3a-c190b62d0afc {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2077.228147] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27f03a26-ff2b-48a2-bb7e-46abb1556077 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.236557] env[62875]: DEBUG oslo_vmware.api [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2077.236557] env[62875]: value = "task-2180131" [ 2077.236557] env[62875]: _type = "Task" [ 2077.236557] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.247151] env[62875]: DEBUG oslo_vmware.api [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.346737] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.350281] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.472s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.351874] env[62875]: DEBUG nova.objects.instance [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lazy-loading 'resources' on Instance uuid 37ae8e69-f953-4846-8a21-fed697ea575a {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2077.377356] env[62875]: INFO nova.scheduler.client.report [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Deleted allocations for instance c1e107cd-5c03-405f-bdae-3281dc4844d5 [ 2077.396349] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Releasing lock "refresh_cache-c217e435-c5d8-406b-99ee-ec71580fb344" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.396349] env[62875]: DEBUG nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Instance network_info: |[{"id": "7632ec90-6680-4447-b3c3-b39afcc641b3", "address": "fa:16:3e:63:44:a0", "network": {"id": "fba17d29-3196-4ae4-967c-20996b319d81", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2080049919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c10385cbf8b248d48c7405dc542bde9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7632ec90-66", "ovs_interfaceid": "7632ec90-6680-4447-b3c3-b39afcc641b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2077.397824] env[62875]: DEBUG oslo_concurrency.lockutils [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] Acquired lock "refresh_cache-c217e435-c5d8-406b-99ee-ec71580fb344" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.398100] env[62875]: DEBUG nova.network.neutron [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Refreshing network info cache for port 7632ec90-6680-4447-b3c3-b39afcc641b3 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2077.399330] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:44:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7632ec90-6680-4447-b3c3-b39afcc641b3', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2077.407649] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Creating folder: Project (c10385cbf8b248d48c7405dc542bde9d). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2077.408219] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce2a919d-c000-46dd-8189-18de54386a49 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.418326] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Created folder: Project (c10385cbf8b248d48c7405dc542bde9d) in parent group-v444854. [ 2077.418537] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Creating folder: Instances. Parent ref: group-v444915. 
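[annotation] The "Instance VIF info" entry above is the Neutron port from the preceding network_info blob re-expressed for the vSphere API: the NSX logical-switch id becomes an OpaqueNetwork reference and the NIC model is pinned to vmxnet3. A rough sketch of that mapping, assuming only the port dict shape shown in the log (this is not Nova's actual helper):

def port_to_vif_info(port):
    # port: one element of the network_info list logged above.
    details = port.get("details", {})
    return {
        "network_name": port["network"]["bridge"],   # e.g. 'br-int'
        "mac_address": port["address"],              # e.g. 'fa:16:3e:63:44:a0'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],                      # Neutron port UUID
        "vif_model": "vmxnet3",                      # NIC model used in this run
    }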
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2077.419130] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11816831-6959-426f-b0d7-f66fb301aaa2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.426238] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "8361611a-ad16-43ef-94e0-f2e7e9851682" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.426498] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.426694] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "8361611a-ad16-43ef-94e0-f2e7e9851682-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.426882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.427068] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.430261] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Created folder: Instances in parent group-v444915. [ 2077.430499] env[62875]: DEBUG oslo.service.loopingcall [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
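[annotation] The terminate path above serializes on two oslo.concurrency locks: one named after the instance UUID for do_terminate_instance, and a nested "<uuid>-events" lock held while pending events are cleared; the "waited/held" figures in these lines are lockutils' own timing. A minimal sketch of the same pattern using oslo.concurrency's lock context manager (do_destroy is a hypothetical callable):

from oslo_concurrency import lockutils

def terminate_instance(instance_uuid, do_destroy):
    # Serialize all lifecycle operations on this instance.
    with lockutils.lock(instance_uuid):
        # Finer-grained lock guarding the instance's pending-event table,
        # mirroring the "<uuid>-events" acquire/release pairs in the log.
        with lockutils.lock(f"{instance_uuid}-events"):
            pass  # clear pending external events here
        do_destroy(instance_uuid)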
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.430960] env[62875]: INFO nova.compute.manager [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Terminating instance [ 2077.432326] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2077.432551] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e7b933a-9398-44bc-a638-e2c15d787a2d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.454545] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2077.454545] env[62875]: value = "task-2180134" [ 2077.454545] env[62875]: _type = "Task" [ 2077.454545] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.465156] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180134, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.475887] env[62875]: DEBUG nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2077.478828] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "e811f624-2dda-468c-ab28-9744c300eb1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.479134] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "e811f624-2dda-468c-ab28-9744c300eb1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.500785] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2077.501111] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2077.501335] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2077.501565] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2077.501812] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2077.502101] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2077.502419] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2077.502706] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2077.502984] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2077.507023] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2077.507023] env[62875]: DEBUG nova.virt.hardware [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 
tempest-ImagesTestJSON-2014349889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2077.509010] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09a96dc-3b69-4bd6-913e-87081c9fcf14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.523498] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d51cf7e-eb7d-46c9-b426-a3c894b80e74 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.534948] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528999df-6d95-d087-83ea-fffda085ec5e, 'name': SearchDatastore_Task, 'duration_secs': 0.027653} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.543506] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.543852] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2077.544170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.544357] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.544574] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.546612] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-905d3b7a-e2f5-4e95-b622-4f5b5091f701 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.554935] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.555054] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2077.555798] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-366a67c5-755f-49ef-957c-be6dca1a7626 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.561676] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2077.561676] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5266d393-c6ea-4c9b-bbed-d10e1a58d57e" [ 2077.561676] env[62875]: _type = "Task" [ 2077.561676] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.570127] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5266d393-c6ea-4c9b-bbed-d10e1a58d57e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.751131] env[62875]: DEBUG oslo_vmware.api [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306968} completed successfully. 
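[annotation] Stepping back to the nova.virt.hardware entries above: for the m1.nano flavor (1 vCPU, no flavor or image limits) the driver enumerates every (sockets, cores, threads) factorization of the vCPU count under the 65536-per-dimension ceilings, which for one vCPU collapses to the single topology 1:1:1. A simplified sketch of that enumeration, using brute force and defaults rather than Nova's full preference logic:

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield (sockets, cores, threads) triples whose product equals vcpus,
    # as in "Build topologies for 1 vcpu(s) 1:1:1" -> [(1, 1, 1)].
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield sockets, cores, threads

print(list(possible_topologies(1)))  # [(1, 1, 1)]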
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.751270] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2077.751462] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2077.751663] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2077.752125] env[62875]: INFO nova.compute.manager [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Took 1.41 seconds to destroy the instance on the hypervisor. [ 2077.752930] env[62875]: DEBUG oslo.service.loopingcall [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.752930] env[62875]: DEBUG nova.compute.manager [-] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2077.752930] env[62875]: DEBUG nova.network.neutron [-] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2077.855308] env[62875]: DEBUG nova.network.neutron [req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Updated VIF entry in instance network info cache for port 4d852812-d3cb-456e-992b-fe1bb2ebc36e. 
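[annotation] The "Updated VIF entry" lines show the Neutron event handler refreshing a single port in the per-instance network info cache rather than rebuilding the whole list. A sketch of that update-by-port-id step, assuming the cache is simply a list of port dicts keyed by 'id' (as in the JSON logged below):

def update_vif_entry(cached_ports, refreshed_port):
    # Replace the matching entry in place; append if the port is new.
    for i, port in enumerate(cached_ports):
        if port["id"] == refreshed_port["id"]:
            cached_ports[i] = refreshed_port
            return cached_ports
    cached_ports.append(refreshed_port)
    return cached_ports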
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2077.856197] env[62875]: DEBUG nova.network.neutron [req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Updating instance_info_cache with network_info: [{"id": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "address": "fa:16:3e:50:e2:e2", "network": {"id": "3df89f88-8add-4870-a7b8-609ae5e1e872", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1644883682-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b5b23e271c6472f893db30de9916fbf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d852812-d3", "ovs_interfaceid": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.889873] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e4888a15-a5e0-4140-8b06-ffefa889967c tempest-ServerAddressesNegativeTestJSON-687786459 tempest-ServerAddressesNegativeTestJSON-687786459-project-member] Lock "c1e107cd-5c03-405f-bdae-3281dc4844d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.101s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.950500] env[62875]: DEBUG nova.compute.manager [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2077.950751] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2077.951829] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0532c2aa-6dd1-40cd-923f-1045665d4ef2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.963288] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180134, 'name': CreateVM_Task, 'duration_secs': 0.3214} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.968030] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2077.968030] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2077.969662] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.969880] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.970299] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2077.972145] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aaf8ca60-66eb-4ee8-813a-cbffd80e1d69 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.976174] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4230cbc-24c6-4945-a9ff-f7ddd1bc2b13 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.984105] env[62875]: DEBUG oslo_vmware.api [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2077.984105] env[62875]: value = "task-2180135" [ 2077.984105] env[62875]: _type = "Task" [ 2077.984105] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.984836] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2077.984836] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5263c007-569e-fa27-f626-39900d660ca6" [ 2077.984836] env[62875]: _type = "Task" [ 2077.984836] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.989357] env[62875]: DEBUG nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2078.005596] env[62875]: DEBUG oslo_vmware.api [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.009932] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5263c007-569e-fa27-f626-39900d660ca6, 'name': SearchDatastore_Task, 'duration_secs': 0.015268} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.009932] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.010301] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2078.010528] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.078115] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5266d393-c6ea-4c9b-bbed-d10e1a58d57e, 'name': SearchDatastore_Task, 'duration_secs': 0.011136} completed successfully. 
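[annotation] "Processing image a9637bcc-…" above is the image-cache dance that dominates this log: lock the cache entry, SearchDatastore for the cached VMDK, create devstack-image-cache_base if missing, then copy the disk into the instance directory. A condensed sketch of that check-then-copy flow under a lock, with file-system stand-ins for what are really vSphere tasks (fetch is a hypothetical Glance download callable):

import shutil
from pathlib import Path

from oslo_concurrency import lockutils

def fetch_image_if_missing(cache_dir: Path, image_id: str, fetch,
                           instance_dir: Path):
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    # Lock the cache entry so concurrent builds of the same image cooperate,
    # like the "[datastore2] devstack-image-cache_base/<id>" locks here.
    with lockutils.lock(str(cached)):
        cache_dir.mkdir(parents=True, exist_ok=True)    # MakeDirectory
        if not cached.exists():                         # SearchDatastore_Task
            cached.parent.mkdir(parents=True, exist_ok=True)
            fetch(image_id, cached)                     # download into cache
    # CopyVirtualDisk_Task equivalent: cache -> instance directory.
    instance_dir.mkdir(parents=True, exist_ok=True)
    shutil.copy2(cached, instance_dir / cached.name)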
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.078115] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fe50b60-01cb-48f7-96db-ce9ab5a9bd78 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.086783] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2078.086783] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5280c7d5-a972-2543-2d14-58d090df018f" [ 2078.086783] env[62875]: _type = "Task" [ 2078.086783] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.094807] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5280c7d5-a972-2543-2d14-58d090df018f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.129058] env[62875]: DEBUG nova.network.neutron [-] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.179057] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aed823e-53a9-481c-a1de-f59cdfb5aba6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.188262] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0935d1d0-2ce7-4995-9eda-8be044d9a32c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.224469] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee1827b-e483-467f-948a-bddffab19587 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.235247] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a81cbd3-43df-495c-8c9f-51e59b859fd9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.248580] env[62875]: DEBUG nova.compute.provider_tree [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2078.362464] env[62875]: DEBUG oslo_concurrency.lockutils 
[req-2ba04f9f-503d-4f91-848c-297ce2accb12 req-1a41836d-c2c4-4900-b99b-ac28cd619e2e service nova] Releasing lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.425744] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "44a248f6-443c-4b7c-95f0-088f0cdb924d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.425990] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.503095] env[62875]: DEBUG oslo_vmware.api [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180135, 'name': PowerOffVM_Task, 'duration_secs': 0.372541} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.505464] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2078.505645] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2078.506930] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-130e16ab-da61-4784-a9b3-0aca82c32366 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.519496] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.520586] env[62875]: DEBUG nova.network.neutron [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Updated VIF entry in instance network info cache for port 7632ec90-6680-4447-b3c3-b39afcc641b3. 
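[annotation] The 8361611a-… teardown interleaved through these lines follows the driver's fixed destroy order: PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task on the instance directory, each awaited before the next. A sketch of that sequence; all the callables are hypothetical stand-ins for the vSphere API calls named in the log:

def destroy_instance(vm_ref, datastore_path, power_off_task, unregister_vm,
                     delete_datastore_file_task, wait_for_task):
    # 1. Power off: a running VM cannot be unregistered from vCenter.
    wait_for_task(power_off_task(vm_ref))
    # 2. Unregister removes the VM from inventory.
    unregister_vm(vm_ref)
    # 3. Delete the VM's directory contents from the datastore,
    #    e.g. "[datastore1] 8361611a-ad16-43ef-94e0-f2e7e9851682".
    wait_for_task(delete_datastore_file_task(datastore_path))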
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2078.521080] env[62875]: DEBUG nova.network.neutron [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Updating instance_info_cache with network_info: [{"id": "7632ec90-6680-4447-b3c3-b39afcc641b3", "address": "fa:16:3e:63:44:a0", "network": {"id": "fba17d29-3196-4ae4-967c-20996b319d81", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2080049919-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c10385cbf8b248d48c7405dc542bde9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7632ec90-66", "ovs_interfaceid": "7632ec90-6680-4447-b3c3-b39afcc641b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.597971] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5280c7d5-a972-2543-2d14-58d090df018f, 'name': SearchDatastore_Task, 'duration_secs': 0.017578} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.597971] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.598205] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 70547fbd-7ce8-466e-8abc-b490b8dd6b28/70547fbd-7ce8-466e-8abc-b490b8dd6b28.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2078.598501] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.598774] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2078.598909] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-760a70d0-6e13-4423-acaa-2e10cac6989a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.601184] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54803cf6-64e8-46a3-9c0b-386887ca2202 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.609534] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2078.609534] env[62875]: value = "task-2180137" [ 2078.609534] env[62875]: _type = "Task" [ 2078.609534] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.613691] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2078.613880] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2078.614947] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1637362e-6115-4f67-8257-14da9b90b56f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.620322] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.623178] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2078.623178] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f2346-d0a7-dd3e-b472-244bb3914392" [ 2078.623178] env[62875]: _type = "Task" [ 2078.623178] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.630434] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f2346-d0a7-dd3e-b472-244bb3914392, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.630829] env[62875]: INFO nova.compute.manager [-] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Took 1.46 seconds to deallocate network for instance. 
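[annotation] "Waiting for function …_deallocate_network_with_retries to return" is oslo.service's looping-call machinery: the deallocate call is wrapped so transient Neutron failures are retried on a timer instead of failing the teardown. A minimal sketch of the same retry shape with a plain loop (the real code drives this through oslo_service.loopingcall rather than time.sleep):

import time

def deallocate_network_with_retries(deallocate, context, instance,
                                    attempts=3, interval=2.0):
    for attempt in range(1, attempts + 1):
        try:
            return deallocate(context, instance)
        except Exception:
            if attempt == attempts:
                raise  # give up and surface the original error
            # Transient failure (e.g. Neutron briefly unavailable): retry.
            time.sleep(interval)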
[ 2078.695565] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2078.695803] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2078.695990] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleting the datastore file [datastore1] 8361611a-ad16-43ef-94e0-f2e7e9851682 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2078.696275] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dcffeed-5468-41ed-9d89-6e59d673a0d1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.703010] env[62875]: DEBUG oslo_vmware.api [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2078.703010] env[62875]: value = "task-2180138" [ 2078.703010] env[62875]: _type = "Task" [ 2078.703010] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.712654] env[62875]: DEBUG oslo_vmware.api [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180138, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.772569] env[62875]: ERROR nova.scheduler.client.report [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] [req-16f4cbdf-573f-4c89-8aea-389567c69a2c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-16f4cbdf-573f-4c89-8aea-389567c69a2c"}]} [ 2078.788810] env[62875]: DEBUG nova.scheduler.client.report [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2078.805633] env[62875]: DEBUG nova.scheduler.client.report [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2078.805879] env[62875]: DEBUG nova.compute.provider_tree [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2078.817622] env[62875]: DEBUG nova.scheduler.client.report [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2078.840171] env[62875]: DEBUG nova.scheduler.client.report [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2078.928729] env[62875]: DEBUG nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Starting instance... 
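[annotation] The 409 above is placement's optimistic-concurrency check: every inventory PUT carries the resource provider generation, and if another writer bumped it first the update is rejected with placement.concurrent_update, after which the report client re-reads its view (note DISK_GB max_unit coming back as 174, not the 175 it tried to write) and retries. A sketch of that compare-and-swap loop against the Placement REST API, assuming a requests-style session with auth and microversion headers already configured:

import requests

def set_inventory(session: requests.Session, placement_url, rp_uuid,
                  inventories, max_retries=3):
    for _ in range(max_retries):
        # Read the provider's current generation.
        rp = session.get(
            f"{placement_url}/resource_providers/{rp_uuid}").json()
        payload = {"resource_provider_generation": rp["generation"],
                   "inventories": inventories}
        resp = session.put(
            f"{placement_url}/resource_providers/{rp_uuid}/inventories",
            json=payload)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # placement.concurrent_update: someone else bumped the generation;
        # loop to re-read and try again, as the report client does here.
    raise RuntimeError(f"generation conflict persisted for {rp_uuid}")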
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2078.982079] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Received event network-vif-deleted-c26e9371-7026-4263-9b30-4e9735c336ac {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2078.982079] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Received event network-changed-8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2078.982079] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Refreshing instance network info cache due to event network-changed-8ba73130-211b-42d3-b2b1-b1b72ee95433. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2078.982539] env[62875]: DEBUG oslo_concurrency.lockutils [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] Acquiring lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.982539] env[62875]: DEBUG oslo_concurrency.lockutils [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] Acquired lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.982539] env[62875]: DEBUG nova.network.neutron [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Refreshing network info cache for port 8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2078.984249] env[62875]: DEBUG nova.network.neutron [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Successfully updated port: a7a79e94-603f-457d-a72b-08c0228a924b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2079.019625] env[62875]: DEBUG nova.network.neutron [-] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.026422] env[62875]: DEBUG oslo_concurrency.lockutils [req-1ed0cfba-cdb0-4620-ab32-76637ad2729f req-104ac253-a943-4a7c-a81a-9393ac5a35a0 service nova] Releasing lock "refresh_cache-c217e435-c5d8-406b-99ee-ec71580fb344" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.030895] env[62875]: DEBUG nova.compute.manager [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Received event network-vif-plugged-a7a79e94-603f-457d-a72b-08c0228a924b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2079.030895] env[62875]: DEBUG 
oslo_concurrency.lockutils [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] Acquiring lock "409b6902-f9ef-469b-a9db-4e93f764d199-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.030895] env[62875]: DEBUG oslo_concurrency.lockutils [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] Lock "409b6902-f9ef-469b-a9db-4e93f764d199-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.030895] env[62875]: DEBUG oslo_concurrency.lockutils [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] Lock "409b6902-f9ef-469b-a9db-4e93f764d199-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.030895] env[62875]: DEBUG nova.compute.manager [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] No waiting events found dispatching network-vif-plugged-a7a79e94-603f-457d-a72b-08c0228a924b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2079.030895] env[62875]: WARNING nova.compute.manager [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Received unexpected event network-vif-plugged-a7a79e94-603f-457d-a72b-08c0228a924b for instance with vm_state building and task_state spawning. [ 2079.030895] env[62875]: DEBUG nova.compute.manager [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Received event network-changed-a7a79e94-603f-457d-a72b-08c0228a924b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2079.030895] env[62875]: DEBUG nova.compute.manager [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Refreshing instance network info cache due to event network-changed-a7a79e94-603f-457d-a72b-08c0228a924b. 
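[annotation] The network-vif-plugged warning above shows the external-event contract: if a build thread registered a waiter for the event, popping it wakes that thread; with "No waiting events found" the event is logged as unexpected and, for network-changed, still triggers a cache refresh. A toy sketch of the pop-or-warn dispatch, guarded the way the "<uuid>-events" locks in this log are:

import threading

class InstanceEvents:
    def __init__(self):
        self._events = {}    # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, uuid, name):
        ev = threading.Event()
        with self._lock:
            self._events[(uuid, name)] = ev
        return ev            # the build thread waits on ev

    def dispatch(self, uuid, name):
        with self._lock:     # mirrors the "<uuid>-events" lock pairs
            ev = self._events.pop((uuid, name), None)
        if ev is None:
            print(f"WARNING: unexpected event {name} for {uuid}")
        else:
            ev.set()         # wake the waiter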
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2079.030895] env[62875]: DEBUG oslo_concurrency.lockutils [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] Acquiring lock "refresh_cache-409b6902-f9ef-469b-a9db-4e93f764d199" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.030895] env[62875]: DEBUG oslo_concurrency.lockutils [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] Acquired lock "refresh_cache-409b6902-f9ef-469b-a9db-4e93f764d199" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.030895] env[62875]: DEBUG nova.network.neutron [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Refreshing network info cache for port a7a79e94-603f-457d-a72b-08c0228a924b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2079.124911] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180137, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.139063] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.142386] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f2346-d0a7-dd3e-b472-244bb3914392, 'name': SearchDatastore_Task, 'duration_secs': 0.022118} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.143845] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb0deb8-f6ab-476e-a8d4-31f46050b7ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.155020] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2079.155020] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dd9c6f-eee0-8f8c-9f23-e9763727074a" [ 2079.155020] env[62875]: _type = "Task" [ 2079.155020] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.161649] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dd9c6f-eee0-8f8c-9f23-e9763727074a, 'name': SearchDatastore_Task} progress is 0%. 
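Every "Task: {'id': task-..., 'name': ...} progress is N%" line above is oslo.vmware polling a vCenter task to completion. A minimal sketch of that pattern, assuming oslo.vmware's public session API; the host, credentials, and disk path below are placeholders:

    from oslo_vmware import api

    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # invoke_api returns the Task moref immediately; wait_for_task then polls
    # TaskInfo every task_poll_interval seconds, logging the progress
    # percentages seen above, until the task succeeds (returning its
    # TaskInfo) or raises on error.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name='[datastore2] placeholder/placeholder.vmdk',
                              newCapacityKb=1048576, eagerZero=False)
    task_info = session.wait_for_task(task)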
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.180318] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a0898c-b43f-4db6-b13c-49d8d483e025 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.192105] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59991774-0beb-4f5b-8cc6-0d24030f5210 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.227141] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42247003-aff6-4f5e-951a-f7ae4238609a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.235076] env[62875]: DEBUG oslo_vmware.api [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302153} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.237273] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2079.237535] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2079.237790] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2079.238059] env[62875]: INFO nova.compute.manager [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Took 1.29 seconds to destroy the instance on the hypervisor. [ 2079.238703] env[62875]: DEBUG oslo.service.loopingcall [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2079.238703] env[62875]: DEBUG nova.compute.manager [-] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2079.239585] env[62875]: DEBUG nova.network.neutron [-] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2079.241779] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e846e52-417e-4b68-8eee-bb9c0f75abef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.256023] env[62875]: DEBUG nova.compute.provider_tree [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2079.448644] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.491676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "refresh_cache-409b6902-f9ef-469b-a9db-4e93f764d199" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.527221] env[62875]: INFO nova.compute.manager [-] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Took 1.77 seconds to deallocate network for instance. [ 2079.569375] env[62875]: DEBUG nova.network.neutron [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2079.621306] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645167} completed successfully. 
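The "Waiting for function ... _deallocate_network_with_retries to return" line above comes from oslo.service's loopingcall module, which Nova uses here to retry network deallocation until it succeeds. A minimal sketch of that retry pattern, with a placeholder callable standing in for Nova's exact code:

    from oslo_service import loopingcall

    def call_with_retries(fn, interval=30, max_attempts=3):
        attempts = [0]

        def _try_once():
            attempts[0] += 1
            try:
                fn()
            except Exception:
                if attempts[0] >= max_attempts:
                    raise           # give up; the error propagates to .wait()
                return              # stay in the loop; retry after `interval`
            raise loopingcall.LoopingCallDone()  # success stops the loop

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        timer.start(interval=interval).wait()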
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.621533] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 70547fbd-7ce8-466e-8abc-b490b8dd6b28/70547fbd-7ce8-466e-8abc-b490b8dd6b28.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2079.621749] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2079.622014] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ab4e431-89f6-47cd-abec-fafe5446c270 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.628668] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2079.628668] env[62875]: value = "task-2180139" [ 2079.628668] env[62875]: _type = "Task" [ 2079.628668] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.637515] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180139, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.663471] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dd9c6f-eee0-8f8c-9f23-e9763727074a, 'name': SearchDatastore_Task, 'duration_secs': 0.041896} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.663741] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.663992] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c217e435-c5d8-406b-99ee-ec71580fb344/c217e435-c5d8-406b-99ee-ec71580fb344.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2079.664271] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bba6e5ec-44a5-4cbc-a03f-77f33ef1637c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.671632] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2079.671632] env[62875]: value = "task-2180140" [ 2079.671632] env[62875]: _type = "Task" [ 2079.671632] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.679738] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180140, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.713816] env[62875]: DEBUG nova.network.neutron [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.799910] env[62875]: DEBUG nova.scheduler.client.report [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 71 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2079.800231] env[62875]: DEBUG nova.compute.provider_tree [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 71 to 72 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2079.800414] env[62875]: DEBUG nova.compute.provider_tree [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2080.034732] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.140684] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180139, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062375} completed successfully. 
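The inventory payloads pushed to Placement above reduce to one rule: schedulable capacity per resource class is (total - reserved) * allocation_ratio, and the 71 -> 72 generation bump is Placement's optimistic-concurrency token (a writer holding a stale generation gets a 409 and must refresh). Worked out for the payload above:

    # Capacity as Placement computes it from the inventory shown in the log:
    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0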
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.141811] env[62875]: DEBUG nova.network.neutron [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updated VIF entry in instance network info cache for port 8ba73130-211b-42d3-b2b1-b1b72ee95433. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2080.142368] env[62875]: DEBUG nova.network.neutron [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updating instance_info_cache with network_info: [{"id": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "address": "fa:16:3e:1e:f3:2e", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba73130-21", "ovs_interfaceid": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.143698] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2080.144866] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faef8dd6-a09a-48f7-a90c-86be843bad2f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.169208] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 70547fbd-7ce8-466e-8abc-b490b8dd6b28/70547fbd-7ce8-466e-8abc-b490b8dd6b28.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2080.170131] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-776796b2-bb74-4298-a6e9-68a255d570bc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.197633] 
env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180140, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.199947] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2080.199947] env[62875]: value = "task-2180141" [ 2080.199947] env[62875]: _type = "Task" [ 2080.199947] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.212025] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180141, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.216660] env[62875]: DEBUG oslo_concurrency.lockutils [req-64c12684-3443-4b98-8d3c-be653546e8e8 req-d64afac5-a79a-4fa1-991a-0ca5ec8b9a2b service nova] Releasing lock "refresh_cache-409b6902-f9ef-469b-a9db-4e93f764d199" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2080.217057] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "refresh_cache-409b6902-f9ef-469b-a9db-4e93f764d199" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2080.217235] env[62875]: DEBUG nova.network.neutron [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2080.305661] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.956s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.308294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.480s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.309939] env[62875]: INFO nova.compute.claims [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2080.325892] env[62875]: DEBUG nova.network.neutron [-] [instance: 
8361611a-ad16-43ef-94e0-f2e7e9851682] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.330169] env[62875]: INFO nova.scheduler.client.report [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Deleted allocations for instance 37ae8e69-f953-4846-8a21-fed697ea575a [ 2080.650760] env[62875]: DEBUG oslo_concurrency.lockutils [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] Releasing lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2080.651096] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-vif-deleted-910a7aba-2677-4608-af5a-5efee055c3aa {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2080.651309] env[62875]: INFO nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Neutron deleted interface 910a7aba-2677-4608-af5a-5efee055c3aa; detaching it from the instance and deleting it from the info cache [ 2080.651604] env[62875]: DEBUG nova.network.neutron [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [{"id": "3f89ecea-c779-49ec-8304-34ae86acec1f", "address": "fa:16:3e:6f:74:65", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.211", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f89ecea-c7", "ovs_interfaceid": "3f89ecea-c779-49ec-8304-34ae86acec1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cf16fb35-456a-450b-8a75-bac02d08f481", "address": "fa:16:3e:44:9c:5e", "network": {"id": "be7b865a-da9e-4284-9313-6199ab2240a3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-308012527", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf16fb35-45", "ovs_interfaceid": "cf16fb35-456a-450b-8a75-bac02d08f481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.699090] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180140, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.712214] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180141, 'name': ReconfigVM_Task, 'duration_secs': 0.293459} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.712530] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 70547fbd-7ce8-466e-8abc-b490b8dd6b28/70547fbd-7ce8-466e-8abc-b490b8dd6b28.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2080.713201] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e90f6cd3-2e25-4a60-9968-1b453dc09aa1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.723155] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2080.723155] env[62875]: value = "task-2180142" [ 2080.723155] env[62875]: _type = "Task" [ 2080.723155] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.736820] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180142, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.764982] env[62875]: DEBUG nova.network.neutron [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2080.828965] env[62875]: INFO nova.compute.manager [-] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Took 1.59 seconds to deallocate network for instance. [ 2080.838894] env[62875]: DEBUG oslo_concurrency.lockutils [None req-136dfc52-d965-45a9-b648-f8c474442fad tempest-ServerMetadataNegativeTestJSON-920433756 tempest-ServerMetadataNegativeTestJSON-920433756-project-member] Lock "37ae8e69-f953-4846-8a21-fed697ea575a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.396s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.916591] env[62875]: DEBUG nova.network.neutron [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Updating instance_info_cache with network_info: [{"id": "a7a79e94-603f-457d-a72b-08c0228a924b", "address": "fa:16:3e:c4:28:50", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a79e94-60", "ovs_interfaceid": "a7a79e94-603f-457d-a72b-08c0228a924b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.055069] env[62875]: DEBUG nova.compute.manager [req-33ab66ef-fed7-41ed-b08f-884b12fecabb req-195dcb21-a47c-4bc6-b283-931a41ed9582 service nova] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Received event network-vif-deleted-78c0725e-6d52-4486-acdf-e95c7a1ae020 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2081.157368] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27673089-d49f-4887-b94c-f463604a476d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.167200] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3e3c8f-2c2f-4c70-adbc-5af8c92752f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.194117] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Detach interface failed, port_id=910a7aba-2677-4608-af5a-5efee055c3aa, reason: Instance 2106a09b-554e-41dd-aa3a-c190b62d0afc could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2081.194352] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-vif-deleted-cf16fb35-456a-450b-8a75-bac02d08f481 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2081.194526] env[62875]: INFO nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Neutron deleted interface cf16fb35-456a-450b-8a75-bac02d08f481; detaching it from the instance and deleting it from the info cache [ 2081.194899] env[62875]: DEBUG nova.network.neutron [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [{"id": "3f89ecea-c779-49ec-8304-34ae86acec1f", "address": "fa:16:3e:6f:74:65", "network": {"id": "b5106368-959d-4519-bfe6-05d3048ec300", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37495441", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.211", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f89ecea-c7", "ovs_interfaceid": "3f89ecea-c779-49ec-8304-34ae86acec1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.204099] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180140, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.133549} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.204837] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c217e435-c5d8-406b-99ee-ec71580fb344/c217e435-c5d8-406b-99ee-ec71580fb344.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2081.204837] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2081.204837] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c3e090a-6224-4841-8e89-203d94140f98 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.211097] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2081.211097] env[62875]: value = "task-2180143" [ 2081.211097] env[62875]: _type = "Task" [ 2081.211097] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.219548] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180143, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.233355] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180142, 'name': Rename_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.335910] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.420168] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "refresh_cache-409b6902-f9ef-469b-a9db-4e93f764d199" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.420298] env[62875]: DEBUG nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Instance network_info: |[{"id": "a7a79e94-603f-457d-a72b-08c0228a924b", "address": "fa:16:3e:c4:28:50", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a79e94-60", "ovs_interfaceid": "a7a79e94-603f-457d-a72b-08c0228a924b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2081.421069] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:28:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7a79e94-603f-457d-a72b-08c0228a924b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2081.428969] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating folder: Project (bef7d358bb2746efb448dbf759cac58c). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2081.431987] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b8046ba-3202-46c5-935d-684abc79de28 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.442653] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Created folder: Project (bef7d358bb2746efb448dbf759cac58c) in parent group-v444854. [ 2081.442847] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating folder: Instances. Parent ref: group-v444918. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2081.443180] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2e2ffea-7053-4675-b996-9967a3adcb66 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.455069] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Created folder: Instances in parent group-v444918. [ 2081.455300] env[62875]: DEBUG oslo.service.loopingcall [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2081.455513] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2081.455734] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af63eb49-b2c0-4130-82d7-c6dbb435669b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.478913] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2081.478913] env[62875]: value = "task-2180146" [ 2081.478913] env[62875]: _type = "Task" [ 2081.478913] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.489100] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180146, 'name': CreateVM_Task} progress is 0%. 
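The two "Creating folder" steps above are synchronous CreateFolder calls (they return the new Folder moref directly), while the CreateVM_Task that follows is asynchronous and is polled with wait_for_task like the disk tasks. A minimal sketch, reusing the `session` from the earlier wait_for_task sketch; `parent_ref` stands in for the group-v444854 folder moref:

    project_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_ref,
        name='Project (bef7d358bb2746efb448dbf759cac58c)')
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', project_folder, name='Instances')
    # A DuplicateName fault here means the folder already exists; Nova
    # handles that case by looking up and reusing the existing folder.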
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.627106] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e329c09a-bbea-4676-8e82-9b6c0bc20e4c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.632641] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76349c5e-fb8b-4ef6-aebf-7275dfe705b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.668399] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e231dc-52e8-42b4-934e-1606290d9ff7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.677151] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb75bc13-1984-427f-a5cd-f85f95dc9ef5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.691308] env[62875]: DEBUG nova.compute.provider_tree [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2081.700392] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2e7e4cf-2ef6-4c63-ba91-9543206fd3c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.711105] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ad4da7-8b62-4274-8bc0-7b65cb60ccc8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.730290] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180143, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068287} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.730898] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2081.738234] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4306c71-7e57-4801-a240-6b8d51e24eb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.740900] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Detach interface failed, port_id=cf16fb35-456a-450b-8a75-bac02d08f481, reason: Instance 2106a09b-554e-41dd-aa3a-c190b62d0afc could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2081.741145] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Received event network-vif-deleted-3f89ecea-c779-49ec-8304-34ae86acec1f {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2081.741323] env[62875]: INFO nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Neutron deleted interface 3f89ecea-c779-49ec-8304-34ae86acec1f; detaching it from the instance and deleting it from the info cache [ 2081.741493] env[62875]: DEBUG nova.network.neutron [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.746363] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180142, 'name': Rename_Task, 'duration_secs': 0.846636} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.746910] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2081.747152] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c64bd4eb-b8e4-45fb-bd85-da42818e135b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.768118] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] c217e435-c5d8-406b-99ee-ec71580fb344/c217e435-c5d8-406b-99ee-ec71580fb344.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2081.769236] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cb423f7-450f-4192-a1cd-5c428a1b1b46 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.785165] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){ [ 2081.785165] env[62875]: value = "task-2180147" [ 2081.785165] env[62875]: _type = "Task" [ 2081.785165] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.790155] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2081.790155] env[62875]: value = "task-2180148" [ 2081.790155] env[62875]: _type = "Task" [ 2081.790155] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.796443] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180147, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.801158] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180148, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.989714] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180146, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.232245] env[62875]: DEBUG nova.scheduler.client.report [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2082.232573] env[62875]: DEBUG nova.compute.provider_tree [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 72 to 73 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2082.232748] env[62875]: DEBUG nova.compute.provider_tree [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2082.249795] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-591d388a-45c2-436a-b075-e8df2f0228f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.262683] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d2bf88-6a15-4fd3-9b73-8c40b9be288a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.292808] env[62875]: DEBUG nova.compute.manager [req-4d02a072-2a2a-43f0-8372-b4f5ae41601e req-a23af9b1-489f-47ff-8f57-8538b725bd55 service nova] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Detach interface failed, port_id=3f89ecea-c779-49ec-8304-34ae86acec1f, reason: Instance 2106a09b-554e-41dd-aa3a-c190b62d0afc could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2082.305338] env[62875]: DEBUG oslo_vmware.api [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180147, 'name': PowerOnVM_Task, 'duration_secs': 0.465065} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.308888] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2082.309253] env[62875]: INFO nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Took 9.71 seconds to spawn the instance on the hypervisor. [ 2082.309540] env[62875]: DEBUG nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2082.310671] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180148, 'name': ReconfigVM_Task, 'duration_secs': 0.282581} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.311599] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17a9a47-4878-49c9-9c71-eae2d85f8776 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.315728] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Reconfigured VM instance instance-00000037 to attach disk [datastore2] c217e435-c5d8-406b-99ee-ec71580fb344/c217e435-c5d8-406b-99ee-ec71580fb344.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2082.316769] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e363933c-8b02-436e-832f-6cb66e6607b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.326832] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2082.326832] env[62875]: value = "task-2180149" [ 2082.326832] env[62875]: _type = "Task" [ 2082.326832] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.336233] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180149, 'name': Rename_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.493959] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180146, 'name': CreateVM_Task, 'duration_secs': 0.556621} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.494932] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2082.495711] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.495931] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.496322] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2082.497027] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b05d69ba-e88b-438c-8663-0a6e7b9d1d07 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.502283] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2082.502283] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bf0278-46d3-b95f-a8d6-89b57a2d85d4" [ 2082.502283] env[62875]: _type = "Task" [ 2082.502283] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.513271] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bf0278-46d3-b95f-a8d6-89b57a2d85d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.741426] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.741948] env[62875]: DEBUG nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2082.745910] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 20.925s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.842847] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180149, 'name': Rename_Task, 'duration_secs': 0.163866} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.843416] env[62875]: INFO nova.compute.manager [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Took 30.99 seconds to build instance. [ 2082.844858] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2082.845505] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a744538-17c8-4927-a47f-c82cbebbfaaa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.852286] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2082.852286] env[62875]: value = "task-2180150" [ 2082.852286] env[62875]: _type = "Task" [ 2082.852286] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.861375] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180150, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.013097] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bf0278-46d3-b95f-a8d6-89b57a2d85d4, 'name': SearchDatastore_Task, 'duration_secs': 0.012917} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.013422] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.013923] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2083.013923] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.014065] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.014216] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2083.014514] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-448c3584-7eaa-4748-8f56-875717356403 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.023577] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2083.024975] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2083.025734] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efb86448-a9e1-4ae7-8917-94fb6295e6b9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.031762] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2083.031762] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd376e-e17e-a9ec-f8a1-cd9dea5fdc92" [ 2083.031762] env[62875]: _type = "Task" [ 2083.031762] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.039657] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd376e-e17e-a9ec-f8a1-cd9dea5fdc92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.258012] env[62875]: DEBUG nova.compute.utils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2083.261926] env[62875]: DEBUG nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Not allocating networking since 'none' was specified. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2083.348057] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c51aa72-a37e-43cf-8825-97ce860ab846 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 90.636s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.364474] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180150, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.544768] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd376e-e17e-a9ec-f8a1-cd9dea5fdc92, 'name': SearchDatastore_Task, 'duration_secs': 0.008022} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.545478] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20dc1fc4-2616-4957-88c5-3f7330c7c0d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.551965] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2083.551965] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d17af1-d81e-d4d2-7578-6c1696a7acee" [ 2083.551965] env[62875]: _type = "Task" [ 2083.551965] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.560955] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d17af1-d81e-d4d2-7578-6c1696a7acee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.763977] env[62875]: DEBUG nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2083.795086] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.795302] env[62875]: WARNING nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 305aebbe-f983-4826-b8c0-9854458f7d48 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2083.795432] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d0c4095f-2d78-4055-b568-7e70e7c4c182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.795565] env[62875]: WARNING nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 8361611a-ad16-43ef-94e0-f2e7e9851682 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
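[editor annotation] The resource tracker records above fall into three cases when reconciling Placement allocations: instances actively managed here keep their allocations, instances scheduled here but not yet started are skipped, and instances unknown to this host that still hold allocations only draw a warning. A paraphrase in Python; the function name and arguments are illustrative only, not Nova's actual _remove_deleted_instances_allocations implementation:

# Illustrative triage of the three outcomes logged above; names are hypothetical.
def triage_allocation(tracked_on_host: bool, scheduled_not_started: bool) -> str:
    if tracked_on_host:
        # "actively managed on this compute host and has allocations in placement"
        return 'keep allocation'
    if scheduled_not_started:
        # "the scheduler has made an allocation ... but the instance has yet to start"
        return 'skip heal'
    # "not being actively managed by this compute host but has allocations
    # referencing this compute host" -- warn and leave the allocation alone
    return 'warn and skip: we do not know what to do'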
[ 2083.795695] env[62875]: WARNING nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2106a09b-554e-41dd-aa3a-c190b62d0afc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2083.795822] env[62875]: WARNING nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 816e0ecb-6476-49bb-9fea-a01067f25b51 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2083.795947] env[62875]: WARNING nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2cf54268-5499-49c9-8029-68b3866581d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2083.796083] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9e0aaea6-96cf-494d-9f70-a709a47f9772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.796202] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.796315] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 70547fbd-7ce8-466e-8abc-b490b8dd6b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.796428] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c217e435-c5d8-406b-99ee-ec71580fb344 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.796532] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 409b6902-f9ef-469b-a9db-4e93f764d199 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.796641] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 1230e54c-701a-4406-95bd-14e32914bc8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.868170] env[62875]: DEBUG oslo_vmware.api [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180150, 'name': PowerOnVM_Task, 'duration_secs': 0.848456} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.868466] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2083.869288] env[62875]: INFO nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Took 8.83 seconds to spawn the instance on the hypervisor. [ 2083.869288] env[62875]: DEBUG nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2083.870022] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367bf4dd-cda4-4c74-9c5e-2bc34949d04b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.065630] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d17af1-d81e-d4d2-7578-6c1696a7acee, 'name': SearchDatastore_Task, 'duration_secs': 0.009612} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.066342] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.066735] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 409b6902-f9ef-469b-a9db-4e93f764d199/409b6902-f9ef-469b-a9db-4e93f764d199.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2084.069400] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-030bbc47-8f51-4684-9fa6-a001ba6db7de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.075551] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2084.075551] env[62875]: value = "task-2180151" [ 2084.075551] env[62875]: _type = "Task" [ 2084.075551] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.084163] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180151, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.121160] env[62875]: DEBUG nova.compute.manager [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Received event network-changed-4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2084.121160] env[62875]: DEBUG nova.compute.manager [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Refreshing instance network info cache due to event network-changed-4d852812-d3cb-456e-992b-fe1bb2ebc36e. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2084.121160] env[62875]: DEBUG oslo_concurrency.lockutils [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] Acquiring lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2084.121160] env[62875]: DEBUG oslo_concurrency.lockutils [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] Acquired lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2084.121160] env[62875]: DEBUG nova.network.neutron [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Refreshing network info cache for port 4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2084.304664] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 45403db3-ff20-42d3-8a37-8db671d8c1fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2084.349514] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "8f817564-b224-4dcb-bd8c-4d63509a5628" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.349759] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.391962] env[62875]: INFO nova.compute.manager [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Took 31.59 seconds to build instance. [ 2084.588981] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180151, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.775590] env[62875]: DEBUG nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Start spawning the instance on the hypervisor.
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2084.809489] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2084.816528] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2084.816814] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2084.816978] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2084.817175] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2084.817324] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2084.817471] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2084.817679] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Topology preferred
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2084.817857] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2084.818061] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2084.818234] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2084.818407] env[62875]: DEBUG nova.virt.hardware [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2084.819333] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff7b1d4-a5a6-4181-9d7a-e70679ba8e89 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.831011] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17174f0c-710e-4bd1-bba5-79284f565fc2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.846475] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2084.853261] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Creating folder: Project (bb55a1a7615d4f46a7cd8fc3bc53f811). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2084.853261] env[62875]: DEBUG nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2084.855482] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16cfc19e-c1d4-43ae-a91f-87d10f32b098 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.865837] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Created folder: Project (bb55a1a7615d4f46a7cd8fc3bc53f811) in parent group-v444854. [ 2084.866052] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Creating folder: Instances. Parent ref: group-v444921. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2084.866289] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a39683e-6ecf-4ffb-bbe1-fb44460f3f73 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.876174] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Created folder: Instances in parent group-v444921. [ 2084.876489] env[62875]: DEBUG oslo.service.loopingcall [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2084.883155] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2084.883401] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-975d9c60-28f3-488b-83e6-2729d73c9be9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.896629] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2be9dcd9-7a16-4566-a4bd-936fde1cf4b4 tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "c217e435-c5d8-406b-99ee-ec71580fb344" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 78.109s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.902367] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2084.902367] env[62875]: value = "task-2180154" [ 2084.902367] env[62875]: _type = "Task" [ 2084.902367] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.912508] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180154, 'name': CreateVM_Task} progress is 0%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.048612] env[62875]: DEBUG nova.network.neutron [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Updated VIF entry in instance network info cache for port 4d852812-d3cb-456e-992b-fe1bb2ebc36e. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2085.051022] env[62875]: DEBUG nova.network.neutron [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Updating instance_info_cache with network_info: [{"id": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "address": "fa:16:3e:50:e2:e2", "network": {"id": "3df89f88-8add-4870-a7b8-609ae5e1e872", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1644883682-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b5b23e271c6472f893db30de9916fbf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d852812-d3", "ovs_interfaceid": "4d852812-d3cb-456e-992b-fe1bb2ebc36e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.089749] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180151, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.750804} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.090023] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 409b6902-f9ef-469b-a9db-4e93f764d199/409b6902-f9ef-469b-a9db-4e93f764d199.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2085.090301] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2085.090595] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df6b8d22-1733-4239-b31d-c4b7b349de7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.098540] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2085.098540] env[62875]: value = "task-2180155" [ 2085.098540] env[62875]: _type = "Task" [ 2085.098540] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.112659] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180155, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.311980] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a19f5bee-ece8-4aa3-8c33-9474da385238 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2085.381377] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.413466] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180154, 'name': CreateVM_Task, 'duration_secs': 0.350639} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.413657] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2085.414183] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2085.414431] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2085.414863] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2085.415223] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e6142dd-4bac-4ac6-beef-8bc21b32a002 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.419756] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2085.419756] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5264d4a1-6ac9-26f3-757b-f3fa4614319b" [ 2085.419756] env[62875]: _type = "Task" [ 2085.419756] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.427977] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5264d4a1-6ac9-26f3-757b-f3fa4614319b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.551905] env[62875]: DEBUG oslo_concurrency.lockutils [req-6c5e19de-6609-4563-a50d-f011f8912bc5 req-1a07bd54-772a-4e07-88ff-b484ddaf45be service nova] Releasing lock "refresh_cache-70547fbd-7ce8-466e-8abc-b490b8dd6b28" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2085.607626] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07156} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.608855] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2085.608855] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377038ce-356e-428b-afc5-456be498adeb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.630691] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 409b6902-f9ef-469b-a9db-4e93f764d199/409b6902-f9ef-469b-a9db-4e93f764d199.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2085.631020] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d15d597-6709-4693-89eb-3ae550526d1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.650800] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2085.650800] env[62875]: value = "task-2180156" [ 2085.650800] env[62875]: _type = "Task" [ 2085.650800] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.668757] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180156, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.748515] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "c217e435-c5d8-406b-99ee-ec71580fb344" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.748786] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "c217e435-c5d8-406b-99ee-ec71580fb344" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.748996] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "c217e435-c5d8-406b-99ee-ec71580fb344-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.749311] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "c217e435-c5d8-406b-99ee-ec71580fb344-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.749466] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "c217e435-c5d8-406b-99ee-ec71580fb344-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.751867] env[62875]: INFO nova.compute.manager [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Terminating instance [ 2085.815410] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2085.931112] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5264d4a1-6ac9-26f3-757b-f3fa4614319b, 'name': SearchDatastore_Task, 'duration_secs': 0.018853} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.931635] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2085.931716] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2085.931888] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2085.932054] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2085.932281] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2085.932467] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ce1b9fd-c4c0-4efc-9e17-35e2914c5bfd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.940430] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2085.940705] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2085.941456] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f76bb8f-2990-4734-ae09-dc2c76b703b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.948619] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2085.948619] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a813ba-4c29-0f78-1edb-7fd6cda19852" [ 2085.948619] env[62875]: _type = "Task" [ 2085.948619] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.956591] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a813ba-4c29-0f78-1edb-7fd6cda19852, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.160902] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180156, 'name': ReconfigVM_Task, 'duration_secs': 0.270966} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.161254] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 409b6902-f9ef-469b-a9db-4e93f764d199/409b6902-f9ef-469b-a9db-4e93f764d199.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2086.161904] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7881b78-bfa3-4964-8462-ec722b8a7a67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.168666] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2086.168666] env[62875]: value = "task-2180157" [ 2086.168666] env[62875]: _type = "Task" [ 2086.168666] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.180018] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180157, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.258642] env[62875]: DEBUG nova.compute.manager [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2086.258880] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2086.262532] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cc2af8-833b-4494-a87e-77d0f5df0647 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.268032] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2086.268295] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c88b09fe-afbe-4929-8f3a-9a2f4a26f8e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.274347] env[62875]: DEBUG oslo_vmware.api [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2086.274347] env[62875]: value = "task-2180158" [ 2086.274347] env[62875]: _type = "Task" [ 2086.274347] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.284097] env[62875]: DEBUG oslo_vmware.api [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180158, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.318859] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance e811f624-2dda-468c-ab28-9744c300eb1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2086.460873] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a813ba-4c29-0f78-1edb-7fd6cda19852, 'name': SearchDatastore_Task, 'duration_secs': 0.008457} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.461701] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad26f8e2-2ced-4af6-ad9d-7240b3821195 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.467468] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2086.467468] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5293575f-e7b9-4fe4-dadd-62b4cbac7d7e" [ 2086.467468] env[62875]: _type = "Task" [ 2086.467468] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.475461] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5293575f-e7b9-4fe4-dadd-62b4cbac7d7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.678913] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180157, 'name': Rename_Task, 'duration_secs': 0.145648} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.679216] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2086.679456] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6db05f70-ff07-450d-87a5-b335cc46a84e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.686279] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2086.686279] env[62875]: value = "task-2180159" [ 2086.686279] env[62875]: _type = "Task" [ 2086.686279] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.693733] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180159, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.784197] env[62875]: DEBUG oslo_vmware.api [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180158, 'name': PowerOffVM_Task, 'duration_secs': 0.326784} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.784463] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2086.784632] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2086.784878] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1e7f016-9103-4154-918f-640ee0ae59c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.823417] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 44a248f6-443c-4b7c-95f0-088f0cdb924d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2086.823705] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2086.823851] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2086.898333] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2086.898570] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2086.898817] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Deleting the datastore file [datastore2] c217e435-c5d8-406b-99ee-ec71580fb344 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2086.899091] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-345af605-100c-4150-8598-f41195f56950 {{(pid=62875) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.906185] env[62875]: DEBUG oslo_vmware.api [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for the task: (returnval){ [ 2086.906185] env[62875]: value = "task-2180161" [ 2086.906185] env[62875]: _type = "Task" [ 2086.906185] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.916842] env[62875]: DEBUG oslo_vmware.api [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.977817] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5293575f-e7b9-4fe4-dadd-62b4cbac7d7e, 'name': SearchDatastore_Task, 'duration_secs': 0.009345} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.980550] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2086.980852] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 1230e54c-701a-4406-95bd-14e32914bc8d/1230e54c-701a-4406-95bd-14e32914bc8d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2086.981348] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97cfd12e-5204-4641-b04b-9454c2f712a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.988237] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2086.988237] env[62875]: value = "task-2180162" [ 2086.988237] env[62875]: _type = "Task" [ 2086.988237] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.999022] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180162, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.002835] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "7f16b893-02e4-4395-b787-f82bc4549e4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.003062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.104599] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecdfe72-1a60-4375-bb31-62072c932d5c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.112454] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c458ed-80dc-499d-b7a1-c205bffed1bd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.143385] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7771314-3546-4cd2-a734-f9a1df2db2bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.154040] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd9bc1a-1126-4f35-a772-4ee8f595e3a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.164677] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2087.197196] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180159, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.422612] env[62875]: DEBUG oslo_vmware.api [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Task: {'id': task-2180161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142251} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.422922] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2087.423084] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2087.423268] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2087.423441] env[62875]: INFO nova.compute.manager [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2087.423687] env[62875]: DEBUG oslo.service.loopingcall [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2087.423884] env[62875]: DEBUG nova.compute.manager [-] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2087.423995] env[62875]: DEBUG nova.network.neutron [-] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2087.501359] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180162, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.507231] env[62875]: DEBUG nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2087.671071] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2087.698920] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180159, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.788871] env[62875]: DEBUG nova.compute.manager [req-e0f873ec-4fca-4d41-bb52-afa0384562a4 req-9ab472f9-01be-4f44-8bf9-8a00915c0abf service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Received event network-vif-deleted-7632ec90-6680-4447-b3c3-b39afcc641b3 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2087.789097] env[62875]: INFO nova.compute.manager [req-e0f873ec-4fca-4d41-bb52-afa0384562a4 req-9ab472f9-01be-4f44-8bf9-8a00915c0abf service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Neutron deleted interface 7632ec90-6680-4447-b3c3-b39afcc641b3; detaching it from the instance and deleting it from the info cache [ 2087.789297] env[62875]: DEBUG nova.network.neutron [req-e0f873ec-4fca-4d41-bb52-afa0384562a4 req-9ab472f9-01be-4f44-8bf9-8a00915c0abf service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2087.998449] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787095} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.999606] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 1230e54c-701a-4406-95bd-14e32914bc8d/1230e54c-701a-4406-95bd-14e32914bc8d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2087.999606] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2087.999606] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03b2fe0d-867a-45d5-af50-d297aba2215e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.005479] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2088.005479] env[62875]: value = "task-2180163" [ 2088.005479] env[62875]: _type = "Task" [ 2088.005479] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.015602] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180163, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.030639] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.178105] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2088.178369] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.432s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.178671] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.889s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.180346] env[62875]: INFO nova.compute.claims [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2088.183335] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.183486] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 2088.197276] env[62875]: DEBUG oslo_vmware.api [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180159, 'name': PowerOnVM_Task, 'duration_secs': 1.284992} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.197531] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2088.197728] env[62875]: INFO nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Took 10.72 seconds to spawn the instance on the hypervisor. 
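The "Inventory has not changed" entries above carry the full inventory dict the resource tracker reports to placement. The usable capacity behind claims like the one logged here is derived per resource class as (total - reserved) * allocation_ratio; a minimal Python sketch of that arithmetic (not the Nova/placement source), using the exact figures from this log:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Capacity placement can allocate against, per resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With only 8 of the 192 effective vCPUs allocated (the "Total usable vcpus: 48, total allocated vcpus: 8" line earlier), claims for m1.nano-sized instances succeed immediately.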
[ 2088.197919] env[62875]: DEBUG nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2088.198746] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903aeedc-b61d-46ad-b6de-10eb353afc7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.272528] env[62875]: DEBUG nova.network.neutron [-] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.291744] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fc2b375-3e61-410f-aa68-cdf63af870b7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.301212] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a91de6-6eb9-4ea3-ac1b-232be9edcea6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.329339] env[62875]: DEBUG nova.compute.manager [req-e0f873ec-4fca-4d41-bb52-afa0384562a4 req-9ab472f9-01be-4f44-8bf9-8a00915c0abf service nova] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Detach interface failed, port_id=7632ec90-6680-4447-b3c3-b39afcc641b3, reason: Instance c217e435-c5d8-406b-99ee-ec71580fb344 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2088.517721] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180163, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.318844} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.518020] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2088.518718] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da88c0d-b8f3-45e6-8dbf-4be78865aa23 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.538229] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 1230e54c-701a-4406-95bd-14e32914bc8d/1230e54c-701a-4406-95bd-14e32914bc8d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2088.538514] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41836b4d-17a7-418d-99fd-753dea424baf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.558382] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2088.558382] env[62875]: value = "task-2180164" [ 2088.558382] env[62875]: _type = "Task" [ 2088.558382] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.566363] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180164, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.689026] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.718521] env[62875]: INFO nova.compute.manager [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Took 34.55 seconds to build instance. [ 2088.776024] env[62875]: INFO nova.compute.manager [-] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Took 1.35 seconds to deallocate network for instance. [ 2089.069272] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180164, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.222700] env[62875]: DEBUG oslo_concurrency.lockutils [None req-20d3dfcd-5df6-418f-bba1-86232cdf98f1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "409b6902-f9ef-469b-a9db-4e93f764d199" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.916s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.283179] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.421665] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5893c4-c14c-43e6-be6f-f899508761b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.429344] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e710882c-a669-4e97-87f4-ab18c1e97941 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.459058] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae393ad-3d5d-4d45-8edf-7617eda223a5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.466371] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138ae425-e058-4fd6-901f-508a289874c4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.480876] env[62875]: DEBUG nova.compute.provider_tree [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2089.569169] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180164, 'name': ReconfigVM_Task, 'duration_secs': 0.550371} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.569474] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 1230e54c-701a-4406-95bd-14e32914bc8d/1230e54c-701a-4406-95bd-14e32914bc8d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2089.570115] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12b5e200-5935-40c9-ab15-2b89a230c904 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.576272] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2089.576272] env[62875]: value = "task-2180165" [ 2089.576272] env[62875]: _type = "Task" [ 2089.576272] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.583813] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180165, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.986773] env[62875]: DEBUG nova.scheduler.client.report [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2090.039141] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82703161-0bb8-421f-82c9-c065c71aa679 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.045759] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b14d17b3-7900-4de0-a4d5-89187a6a0cf8 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Suspending the VM {{(pid=62875) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2090.045982] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e83eb9a7-cb57-496d-8a9c-a28634931764 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.052264] env[62875]: DEBUG oslo_vmware.api [None req-b14d17b3-7900-4de0-a4d5-89187a6a0cf8 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for 
the task: (returnval){ [ 2090.052264] env[62875]: value = "task-2180166" [ 2090.052264] env[62875]: _type = "Task" [ 2090.052264] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.059879] env[62875]: DEBUG oslo_vmware.api [None req-b14d17b3-7900-4de0-a4d5-89187a6a0cf8 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180166, 'name': SuspendVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.085025] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180165, 'name': Rename_Task, 'duration_secs': 0.120433} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.085342] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2090.085584] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4db25a2-9e14-4fa0-912f-5c3d41f668ff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.091892] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2090.091892] env[62875]: value = "task-2180167" [ 2090.091892] env[62875]: _type = "Task" [ 2090.091892] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.099916] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180167, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.492223] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.492842] env[62875]: DEBUG nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2090.495760] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.811s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.497209] env[62875]: INFO nova.compute.claims [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2090.562323] env[62875]: DEBUG oslo_vmware.api [None req-b14d17b3-7900-4de0-a4d5-89187a6a0cf8 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180166, 'name': SuspendVM_Task} progress is 70%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.601679] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180167, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.005370] env[62875]: DEBUG nova.compute.utils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2091.005500] env[62875]: DEBUG nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2091.005555] env[62875]: DEBUG nova.network.neutron [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2091.054291] env[62875]: DEBUG nova.policy [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '791a39d3328040d5aa1140485a997d43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b93283112aad44f4833c1cc017a566db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2091.065447] env[62875]: DEBUG oslo_vmware.api [None req-b14d17b3-7900-4de0-a4d5-89187a6a0cf8 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180166, 'name': SuspendVM_Task, 'duration_secs': 0.629232} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.065710] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b14d17b3-7900-4de0-a4d5-89187a6a0cf8 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Suspended the VM {{(pid=62875) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2091.065888] env[62875]: DEBUG nova.compute.manager [None req-b14d17b3-7900-4de0-a4d5-89187a6a0cf8 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2091.066827] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad42714-afa3-4979-beed-9d7a19da4a31 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.103516] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180167, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.346617] env[62875]: DEBUG nova.network.neutron [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Successfully created port: 76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2091.512413] env[62875]: DEBUG nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2091.604913] env[62875]: DEBUG oslo_vmware.api [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180167, 'name': PowerOnVM_Task, 'duration_secs': 1.240681} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.605239] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2091.605543] env[62875]: INFO nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Took 6.83 seconds to spawn the instance on the hypervisor. 
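Nearly every vCenter round trip in this log follows the same shape: oslo.vmware invokes a *_Task method, vCenter immediately returns a Task moref (the 'value = "task-XXXX"' blocks), and wait_for_task() drives the "progress is N% ... completed successfully" polling emitted from _poll_task. A self-contained sketch of that pattern with oslo.vmware; the endpoint, credentials, and VM lookup are placeholders, not values from this log:

from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',       # hypothetical host
    'administrator', 'secret',   # hypothetical credentials
    api_retry_count=10,
    task_poll_interval=0.5,
)

# Grab an arbitrary VM moref just to keep the sketch runnable; Nova resolves
# instances by UUID instead.
result = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'VirtualMachine', 1)
vm_ref = result.objects[0].obj

# Corresponds to 'Invoking VirtualMachine.PowerOnVM_Task' above: the call
# returns at once with a Task reference rather than blocking.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() is the loop behind the repeated 'progress is N%' lines;
# it returns the task info on success and raises on error states.
task_info = session.wait_for_task(task)
print(task_info.state)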
[ 2091.605742] env[62875]: DEBUG nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2091.606550] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2583ba5-4454-450b-b3cc-f4730c2cae90 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.836177] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4337464-492e-4dc7-bf84-f8c6adb60a42 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.844202] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06067b67-d657-4cf1-8a90-03700a607e16 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.878281] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2690ae12-a395-477b-85e6-e651d2890d97 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.886365] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aea6289-1dd6-47bf-99a1-0283252be7b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.902431] env[62875]: DEBUG nova.compute.provider_tree [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2092.125277] env[62875]: INFO nova.compute.manager [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Took 31.31 seconds to build instance. 
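The "Acquiring lock ... by ...", "acquired ... waited", and '"released" ... held 52.339s' triplets that bracket this build come from oslo.concurrency's synchronized wrapper: Nova serializes operations on one instance by taking a lock named after the instance UUID, so a concurrent delete and build of the same instance cannot interleave. A minimal sketch of the pattern (illustrative names, not Nova source):

import time
from oslo_concurrency import lockutils

# The UUID string is the lock name, exactly as in the Lock "1230e54c-..."
# lines above; concurrent callers queue here, and the wrapper logs the
# waited/held durations seen throughout this log.
@lockutils.synchronized('1230e54c-701a-4406-95bd-14e32914bc8d')
def locked_do_build_and_run_instance():
    time.sleep(0.1)  # stand-in for the spawn work timed at 52.339s above

locked_do_build_and_run_instance()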
[ 2092.367215] env[62875]: DEBUG nova.compute.manager [None req-27e4f430-384e-4ae6-b02f-477c5ec2632a tempest-ServerDiagnosticsV248Test-608639203 tempest-ServerDiagnosticsV248Test-608639203-project-admin] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2092.368446] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adff3cf1-ab7a-43b5-8596-82c2df58f569 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.376018] env[62875]: INFO nova.compute.manager [None req-27e4f430-384e-4ae6-b02f-477c5ec2632a tempest-ServerDiagnosticsV248Test-608639203 tempest-ServerDiagnosticsV248Test-608639203-project-admin] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Retrieving diagnostics [ 2092.377032] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757dec3c-a2ac-4367-b7dd-5033e4e9bb48 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.408069] env[62875]: DEBUG nova.scheduler.client.report [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2092.523136] env[62875]: DEBUG nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2092.549016] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2092.549401] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2092.549644] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2092.549921] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2092.550182] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2092.550415] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2092.550721] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2092.550964] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2092.551253] 
env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2092.551497] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2092.551763] env[62875]: DEBUG nova.virt.hardware [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2092.553346] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13356703-c5f5-44b8-b38f-f8d0fbdf0bb9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.562691] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46315019-3674-4aca-9a9a-8860ebe13421 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.626929] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b6531627-e832-4ac3-91ba-d72e9e42c715 tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "1230e54c-701a-4406-95bd-14e32914bc8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.339s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.912841] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.417s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.913356] env[62875]: DEBUG nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2092.916528] env[62875]: DEBUG nova.network.neutron [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Successfully updated port: 76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2092.917936] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.063s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.918019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.919945] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.139s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.921806] env[62875]: INFO nova.compute.claims [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2092.930951] env[62875]: DEBUG nova.compute.manager [req-89a50ea5-e2d0-45bf-ad3c-271b7d7f1abd req-bb4ff806-9deb-47b6-aca9-e361d132207d service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Received event network-vif-plugged-76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2092.930951] env[62875]: DEBUG oslo_concurrency.lockutils [req-89a50ea5-e2d0-45bf-ad3c-271b7d7f1abd req-bb4ff806-9deb-47b6-aca9-e361d132207d service nova] Acquiring lock "45403db3-ff20-42d3-8a37-8db671d8c1fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.930951] env[62875]: DEBUG oslo_concurrency.lockutils [req-89a50ea5-e2d0-45bf-ad3c-271b7d7f1abd req-bb4ff806-9deb-47b6-aca9-e361d132207d service nova] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.930951] env[62875]: DEBUG oslo_concurrency.lockutils [req-89a50ea5-e2d0-45bf-ad3c-271b7d7f1abd req-bb4ff806-9deb-47b6-aca9-e361d132207d service nova] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.930951] env[62875]: DEBUG nova.compute.manager [req-89a50ea5-e2d0-45bf-ad3c-271b7d7f1abd req-bb4ff806-9deb-47b6-aca9-e361d132207d service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] No waiting events found dispatching network-vif-plugged-76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2092.930951] env[62875]: WARNING nova.compute.manager [req-89a50ea5-e2d0-45bf-ad3c-271b7d7f1abd req-bb4ff806-9deb-47b6-aca9-e361d132207d service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Received unexpected event network-vif-plugged-76f0c221-d292-49e7-9f24-d43fa768395b for instance with vm_state building and task_state spawning. [ 2092.958280] env[62875]: INFO nova.scheduler.client.report [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted allocations for instance 305aebbe-f983-4826-b8c0-9854458f7d48 [ 2093.425088] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.425306] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.425601] env[62875]: DEBUG nova.network.neutron [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2093.428283] env[62875]: DEBUG nova.compute.utils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2093.431576] env[62875]: DEBUG nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Not allocating networking since 'none' was specified. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2093.465104] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a663da3b-b9d8-435d-ac99-94f03c88d1fc tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "305aebbe-f983-4826-b8c0-9854458f7d48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.952s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.668056] env[62875]: DEBUG nova.compute.manager [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2093.669073] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bcb38d-3aa3-4724-987d-8d69f5c0aca8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.935546] env[62875]: DEBUG nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2093.974833] env[62875]: DEBUG nova.network.neutron [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2094.156690] env[62875]: DEBUG nova.network.neutron [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Updating instance_info_cache with network_info: [{"id": "76f0c221-d292-49e7-9f24-d43fa768395b", "address": "fa:16:3e:84:0b:36", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f0c221-d2", "ovs_interfaceid": "76f0c221-d292-49e7-9f24-d43fa768395b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.180591] env[62875]: INFO nova.compute.manager [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] instance snapshotting [ 2094.180790] env[62875]: WARNING nova.compute.manager [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 2094.183234] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142627dc-ecc2-4001-a84a-f044f4e128c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.207863] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47807415-7f93-4413-8b1e-1775b0ed4d13 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.229258] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99cd14e5-0688-4a49-9b17-decf8fa61d8b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.236557] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc7df5e-a994-47d6-be8c-90bc2fe42627 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.265948] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19cb825-f64d-492d-950b-93066d456560 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.273381] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c90ad3-2bcd-48a7-9484-5c76f9486d81 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.287837] env[62875]: DEBUG nova.compute.provider_tree [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2094.600554] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.600747] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.658708] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.658980] env[62875]: DEBUG nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Instance network_info: |[{"id": "76f0c221-d292-49e7-9f24-d43fa768395b", "address": "fa:16:3e:84:0b:36", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f0c221-d2", "ovs_interfaceid": "76f0c221-d292-49e7-9f24-d43fa768395b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2094.659401] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:0b:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76f0c221-d292-49e7-9f24-d43fa768395b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2094.666791] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Creating folder: Project (b93283112aad44f4833c1cc017a566db). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2094.667313] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9644ce11-7830-4af8-98a5-e42069a4e83c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.678177] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Created folder: Project (b93283112aad44f4833c1cc017a566db) in parent group-v444854. [ 2094.678451] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Creating folder: Instances. Parent ref: group-v444924. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2094.678594] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5f89b6b-8c21-4539-a849-4b7f1fc2fd44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.686921] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Created folder: Instances in parent group-v444924. [ 2094.687170] env[62875]: DEBUG oslo.service.loopingcall [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2094.687355] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2094.687553] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5635b5b0-7b93-4d02-9391-7dfdafad6779 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.706187] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2094.706187] env[62875]: value = "task-2180170" [ 2094.706187] env[62875]: _type = "Task" [ 2094.706187] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.713401] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180170, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.717502] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2094.717740] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0277987d-d62f-4500-9673-a2148412f351 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.723337] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2094.723337] env[62875]: value = "task-2180171" [ 2094.723337] env[62875]: _type = "Task" [ 2094.723337] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.730870] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180171, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.790934] env[62875]: DEBUG nova.scheduler.client.report [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2094.950615] env[62875]: DEBUG nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2094.956987] env[62875]: DEBUG nova.compute.manager [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Received event network-changed-76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2094.957198] env[62875]: DEBUG nova.compute.manager [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Refreshing instance network info cache due to event network-changed-76f0c221-d292-49e7-9f24-d43fa768395b. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2094.957398] env[62875]: DEBUG oslo_concurrency.lockutils [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] Acquiring lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2094.957559] env[62875]: DEBUG oslo_concurrency.lockutils [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] Acquired lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2094.957817] env[62875]: DEBUG nova.network.neutron [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Refreshing network info cache for port 76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2094.976351] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2094.976614] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2094.976775] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2094.976959] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2094.977163] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2094.977330] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2094.977464] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2094.977629] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2094.977790] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2094.978017] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2094.978149] env[62875]: DEBUG nova.virt.hardware [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2094.979486] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62425c0-e8e8-4858-b79c-92745fc04e6b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.987536] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855c6507-41e6-45ac-9ede-4945522ac6db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.003182] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2095.008904] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Creating folder: Project (42f9d4b0f31a4efea8940a5335b3fba2). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2095.009466] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-315ecd3d-d69f-4330-bd51-8e3263dce726 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.017965] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Created folder: Project (42f9d4b0f31a4efea8940a5335b3fba2) in parent group-v444854. [ 2095.018164] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Creating folder: Instances. Parent ref: group-v444927. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2095.018382] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71eb5e6c-4593-4d74-b194-90e9bba78d47 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.025954] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Created folder: Instances in parent group-v444927. [ 2095.026189] env[62875]: DEBUG oslo.service.loopingcall [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2095.026367] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2095.026552] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec2dda4f-43a8-403c-bb93-9cb954a43c3b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.043090] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2095.043090] env[62875]: value = "task-2180174" [ 2095.043090] env[62875]: _type = "Task" [ 2095.043090] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.050351] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180174, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.103179] env[62875]: DEBUG nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2095.217039] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180170, 'name': CreateVM_Task, 'duration_secs': 0.309219} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.217039] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2095.217242] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2095.217329] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2095.217670] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2095.217939] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-586dffd3-504b-44d6-a016-1e04e40de45f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.222723] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2095.222723] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d261b5-aed6-6012-417f-6494369580c5" [ 2095.222723] env[62875]: _type = "Task" [ 2095.222723] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.238852] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d261b5-aed6-6012-417f-6494369580c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.239673] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180171, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.295935] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2095.296596] env[62875]: DEBUG nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2095.299808] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.263s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.300042] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2095.302161] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.177s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.303557] env[62875]: INFO nova.compute.claims [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2095.326585] env[62875]: INFO nova.scheduler.client.report [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted allocations for instance 816e0ecb-6476-49bb-9fea-a01067f25b51 [ 2095.554124] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180174, 'name': CreateVM_Task, 'duration_secs': 0.274259} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.554124] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2095.554506] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2095.622184] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.687177] env[62875]: DEBUG nova.network.neutron [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Updated VIF entry in instance network info cache for port 76f0c221-d292-49e7-9f24-d43fa768395b. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2095.687710] env[62875]: DEBUG nova.network.neutron [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Updating instance_info_cache with network_info: [{"id": "76f0c221-d292-49e7-9f24-d43fa768395b", "address": "fa:16:3e:84:0b:36", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f0c221-d2", "ovs_interfaceid": "76f0c221-d292-49e7-9f24-d43fa768395b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2095.736282] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d261b5-aed6-6012-417f-6494369580c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009945} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.739328] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2095.739565] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2095.739799] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2095.739948] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2095.740172] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2095.740437] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180171, 'name': CreateSnapshot_Task, 'duration_secs': 0.54587} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.740649] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2095.740947] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2095.741192] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6208fd5e-6bf8-451f-b23f-8d85e28b3753 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.742773] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2095.742992] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3859c030-df16-4eb5-a0d6-3bf9075096de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.744763] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2078dd-d120-4406-b794-76aca75b263c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.750737] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2095.750737] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521d2b52-b8bf-704a-d71b-7c7c55aac06c" [ 2095.750737] env[62875]: _type = "Task" [ 2095.750737] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.759847] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2095.760151] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2095.761032] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a67f3428-0843-490c-87f0-83be8dfce7f7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.768213] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521d2b52-b8bf-704a-d71b-7c7c55aac06c, 'name': SearchDatastore_Task, 'duration_secs': 0.008348} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.769450] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2095.769735] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2095.769994] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2095.770360] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2095.770360] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5230159d-4801-7ca0-bd57-c583433d5239" [ 2095.770360] env[62875]: _type = "Task" [ 2095.770360] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.778095] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5230159d-4801-7ca0-bd57-c583433d5239, 'name': SearchDatastore_Task, 'duration_secs': 0.008126} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.778787] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1b863f3-b2c0-4b36-8a5c-187c43d4eb55 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.783413] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2095.783413] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c3ae7b-7971-d82c-8346-52f2decb4f93" [ 2095.783413] env[62875]: _type = "Task" [ 2095.783413] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.790581] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c3ae7b-7971-d82c-8346-52f2decb4f93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.807831] env[62875]: DEBUG nova.compute.utils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2095.811206] env[62875]: DEBUG nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Not allocating networking since 'none' was specified. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2095.833053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0d8cf443-4e48-4a82-83bb-7f0353cff687 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "816e0ecb-6476-49bb-9fea-a01067f25b51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.868s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.190844] env[62875]: DEBUG oslo_concurrency.lockutils [req-9579b791-6d19-4164-adaf-aee0cb5c9ceb req-9eda7da2-9bfd-4d57-a9ef-3af0bc053955 service nova] Releasing lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2096.269480] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2096.269878] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3304f997-6c41-4c9d-a1a5-74df2367cddb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.279475] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2096.279475] env[62875]: value = "task-2180175" [ 2096.279475] env[62875]: _type = "Task" [ 2096.279475] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.291623] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180175, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.298168] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c3ae7b-7971-d82c-8346-52f2decb4f93, 'name': SearchDatastore_Task, 'duration_secs': 0.00761} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.298400] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2096.298657] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 45403db3-ff20-42d3-8a37-8db671d8c1fa/45403db3-ff20-42d3-8a37-8db671d8c1fa.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2096.298912] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.299111] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2096.299321] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e57f8de-61e0-4a04-bd55-1e80f7ca8e01 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.301062] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc134a7d-1d61-4017-b178-21b13ac8b04d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.306917] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2096.306917] env[62875]: value = "task-2180176" [ 2096.306917] env[62875]: _type = "Task" [ 2096.306917] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.310550] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2096.310718] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Folder [datastore2] devstack-image-cache_base created. 
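{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}

The interleaved lock/copy records above show the image-cache discipline: each request serializes on the cached VMDK's datastore path, ensures the devstack-image-cache_base folder exists (FileManager.MakeDirectory is effectively idempotent here, hence "Folder ... created" even when it already existed), then copies the cached disk into the instance's own directory with CopyVirtualDisk_Task. A condensed sketch of that flow using oslo.concurrency's lock() context manager; the callables are placeholders standing in for the corresponding vCenter invocations, not Nova's real helpers:

    from oslo_concurrency import lockutils

    def copy_cached_image(cache_vmdk, instance_vmdk, make_cache_dir, copy_disk, wait):
        # The lock name mirrors the datastore path in the log, e.g.
        # '[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
        # so concurrent spawns sharing one cache entry are serialized.
        with lockutils.lock(cache_vmdk):
            make_cache_dir()                         # FileManager.MakeDirectory
        task = copy_disk(cache_vmdk, instance_vmdk)  # CopyVirtualDisk_Task
        wait(task)                                   # poll to "completed successfully"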
[ 2096.314061] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6231cc14-3b10-41aa-b341-2760abc36d69 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.320033] env[62875]: DEBUG nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2096.322031] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180176, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.325571] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2096.325571] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d77f96-6ec6-ee50-0407-fd633add0fb4" [ 2096.325571] env[62875]: _type = "Task" [ 2096.325571] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.333508] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d77f96-6ec6-ee50-0407-fd633add0fb4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.577027] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef00afc-e0bc-4692-bdef-2cd0103b2414 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.583742] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0755772d-2284-4747-9577-0ce984cd635e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.614056] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fc3448-e443-4ed1-82ad-b7d70b4a8174 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.622242] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee0e600-a8fe-41a2-97b5-e5929f9fb07c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.636352] env[62875]: DEBUG nova.compute.provider_tree [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.790721] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180175, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.818696] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180176, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.840366] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d77f96-6ec6-ee50-0407-fd633add0fb4, 'name': SearchDatastore_Task, 'duration_secs': 0.011668} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.841314] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e26d05e-5512-4837-b2de-d6bc58707f3d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.847651] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2096.847651] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb82b4-0102-2282-6392-e80cb96c2bad" [ 2096.847651] env[62875]: _type = "Task" [ 2096.847651] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.860091] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb82b4-0102-2282-6392-e80cb96c2bad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.140012] env[62875]: DEBUG nova.scheduler.client.report [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2097.261012] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "380229e2-25ba-47cb-a6ca-167b9d9672eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.261336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "380229e2-25ba-47cb-a6ca-167b9d9672eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.290271] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180175, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.317364] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180176, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618319} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.317697] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 45403db3-ff20-42d3-8a37-8db671d8c1fa/45403db3-ff20-42d3-8a37-8db671d8c1fa.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2097.317911] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2097.318185] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c4a7658-7d9e-4c34-9cd8-0e6ff8d66b29 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.324362] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2097.324362] env[62875]: value = "task-2180177" [ 2097.324362] env[62875]: _type = "Task" [ 2097.324362] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.331406] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180177, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.335563] env[62875]: DEBUG nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2097.359049] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb82b4-0102-2282-6392-e80cb96c2bad, 'name': SearchDatastore_Task, 'duration_secs': 0.052969} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.361384] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2097.361686] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2097.361858] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2097.362074] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2097.362236] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2097.362385] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2097.362621] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2097.362788] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2097.362958] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d 
tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2097.363155] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2097.363329] env[62875]: DEBUG nova.virt.hardware [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2097.363614] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2097.363848] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d/737c68b0-9ccf-4e0b-a46d-aa78f7981c3d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2097.364615] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d4ee3f-9def-43d8-be3e-7db6a4f00640 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.367399] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea7a707a-34d5-46f2-828f-525242c3d7e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.375793] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b73a847-4721-4943-9bbc-05d148107149 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.379863] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2097.379863] env[62875]: value = "task-2180178" [ 2097.379863] env[62875]: _type = "Task" [ 2097.379863] env[62875]: } to complete. 
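{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}

The nova.virt.hardware records above show the CPU-topology search for the 1-vCPU m1.nano flavor: with no flavor or image limits set (logged as 0:0:0), the effective caps default to 65536 sockets/cores/threads, every factorization of vcpus into sockets*cores*threads is enumerated, and for one vCPU the only candidate is 1:1:1. A small illustrative sketch of that enumeration (not Nova's implementation):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Yield (sockets, cores, threads) triples whose product equals vcpus
        # and which respect the caps; cf. "Build topologies for 1 vcpu(s) 1:1:1".
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
                yield (s, c, t)

    # list(possible_topologies(1)) == [(1, 1, 1)], matching "Possible
    # topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" above.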
[ 2097.391491] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2097.397051] env[62875]: DEBUG oslo.service.loopingcall [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2097.397941] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2097.398215] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9bdd182-e963-4fed-86e5-41bfd4db3e1b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.414106] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180178, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.417786] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2097.417786] env[62875]: value = "task-2180179" [ 2097.417786] env[62875]: _type = "Task" [ 2097.417786] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.427192] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180179, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.645539] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.646188] env[62875]: DEBUG nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Start building networks asynchronously for instance. 
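{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}

The "oslo.service.loopingcall ... Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry above is oslo.service's periodic-call primitive: a function runs on a fixed interval until it raises LoopingCallDone, whose retvalue becomes the result of wait(). A minimal sketch of that pattern (the poll function here is a stand-in, not Nova's code):

    from oslo_service import loopingcall

    def wait_until_done(poll_once, interval=0.5):
        # poll_once() returns None while work is pending and the final value
        # when finished; LoopingCallDone stops the timer and carries it back.
        def _poll():
            result = poll_once()
            if result is not None:
                raise loopingcall.LoopingCallDone(retvalue=result)
        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()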
[ 2097.648968] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.595s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.649228] env[62875]: DEBUG nova.objects.instance [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62875) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2097.763818] env[62875]: DEBUG nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2097.791290] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180175, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.834580] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067922} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.834676] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2097.835740] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599f6982-b3ca-4212-af04-96090b860ce1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.858278] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 45403db3-ff20-42d3-8a37-8db671d8c1fa/45403db3-ff20-42d3-8a37-8db671d8c1fa.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2097.858555] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0e94a4b-0fe2-4c4c-8623-498ea72ce397 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.878509] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2097.878509] env[62875]: value = "task-2180180" [ 2097.878509] env[62875]: _type = "Task" [ 2097.878509] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.889220] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180180, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.892066] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180178, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.929356] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180179, 'name': CreateVM_Task, 'duration_secs': 0.288375} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.929557] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2097.930126] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2097.930329] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2097.930728] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2097.931066] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b103415b-cd14-48cd-821f-547af19e8a6e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.936876] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2097.936876] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5290571b-bc26-3a3b-5070-6afd5c3b880e" [ 2097.936876] env[62875]: _type = "Task" [ 2097.936876] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.946543] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5290571b-bc26-3a3b-5070-6afd5c3b880e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.158440] env[62875]: DEBUG nova.compute.utils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2098.162691] env[62875]: DEBUG nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2098.162691] env[62875]: DEBUG nova.network.neutron [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2098.209978] env[62875]: DEBUG nova.policy [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1627c54e8e7f4712b9dcd174f991811e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '770eddfb80d943c7a34a3d9a60845079', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2098.287618] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.293466] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180175, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.394042] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.816468} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.397138] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d/737c68b0-9ccf-4e0b-a46d-aa78f7981c3d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2098.397381] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2098.397611] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180180, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.397820] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f20b4fe-1446-46fa-98e2-d22a3fe83bc7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.403931] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2098.403931] env[62875]: value = "task-2180181" [ 2098.403931] env[62875]: _type = "Task" [ 2098.403931] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.412594] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180181, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.447975] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5290571b-bc26-3a3b-5070-6afd5c3b880e, 'name': SearchDatastore_Task, 'duration_secs': 0.029113} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.448334] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2098.448655] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2098.448822] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2098.449086] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2098.449236] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2098.449503] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58e24572-0694-4188-993a-94fac43f607d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.464106] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2098.464106] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2098.464650] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65123115-de81-4714-9455-8c185a91d572 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.471401] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2098.471401] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dfd0a8-a646-5ff5-8ee5-7e91a7aa7acb" [ 2098.471401] env[62875]: _type = "Task" [ 2098.471401] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.480499] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dfd0a8-a646-5ff5-8ee5-7e91a7aa7acb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.516371] env[62875]: DEBUG nova.network.neutron [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Successfully created port: 55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2098.661732] env[62875]: DEBUG nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2098.665350] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9609761d-05b6-4ad0-98a5-113850243923 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.666374] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.147s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.667865] env[62875]: INFO nova.compute.claims [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2098.791710] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180175, 'name': CloneVM_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.889743] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180180, 'name': ReconfigVM_Task, 'duration_secs': 0.766562} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.890038] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 45403db3-ff20-42d3-8a37-8db671d8c1fa/45403db3-ff20-42d3-8a37-8db671d8c1fa.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2098.890773] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-542c8021-2b07-402d-954f-21d11eeb510e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.898624] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2098.898624] env[62875]: value = "task-2180182" [ 2098.898624] env[62875]: _type = "Task" [ 2098.898624] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.910634] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180182, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.915671] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065323} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.915907] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2098.916647] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcede5d-acf6-451b-8342-2f76963ef5cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.936570] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d/737c68b0-9ccf-4e0b-a46d-aa78f7981c3d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2098.936841] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-667ab31b-cbef-4f2c-b19e-27bdc452bd37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.956534] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2098.956534] env[62875]: value = "task-2180183" [ 2098.956534] env[62875]: _type = "Task" [ 2098.956534] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.964448] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180183, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.980779] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dfd0a8-a646-5ff5-8ee5-7e91a7aa7acb, 'name': SearchDatastore_Task, 'duration_secs': 0.022876} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.981902] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5324939-8b38-464d-b2b7-e66b481bc5b7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.986867] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2098.986867] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eb4e4f-434b-1e75-37b1-c976fd845f0d" [ 2098.986867] env[62875]: _type = "Task" [ 2098.986867] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.994852] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eb4e4f-434b-1e75-37b1-c976fd845f0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.292218] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180175, 'name': CloneVM_Task, 'duration_secs': 2.543384} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.292342] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Created linked-clone VM from snapshot [ 2099.293232] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8013ad8e-fe46-433e-80c0-9141ae53e9ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.300582] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Uploading image f5485f99-7141-4502-914d-f249d8bdef3f {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2099.322537] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2099.322537] env[62875]: value = "vm-444931" [ 2099.322537] env[62875]: _type = "VirtualMachine" [ 2099.322537] env[62875]: }. 
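{{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}}

The HttpNfcLease records that follow are the start of the stream-optimized image upload (nova.virt.vmwareapi.images.upload_image_stream_optimized): ExportVm returns a lease, the lease is polled while "initializing", and once ready it exposes the HTTP device URLs the disk bytes are read from. A sketch of that handshake, assuming a pyVmomi-style VirtualMachine object; error handling and lease progress/completion calls are omitted:

    import time

    def get_export_urls(vm, poll=0.5):
        lease = vm.ExportVm()                 # VirtualMachine.ExportVm
        while lease.state == 'initializing':  # "Lease ... is initializing."
            time.sleep(poll)
        if lease.state != 'ready':
            raise RuntimeError(getattr(lease, 'error', 'lease not ready'))
        # Each deviceUrl entry carries an HTTP(S) URL for one exported disk;
        # the upload path streams the VMDK bytes from these URLs into the
        # image service.
        return [dev.url for dev in lease.info.deviceUrl]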
[ 2099.323113] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e756e634-875e-4c88-a0e0-9ab152ceee05 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.330781] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lease: (returnval){ [ 2099.330781] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e17330-3d4b-aa73-385c-461db6e3fd43" [ 2099.330781] env[62875]: _type = "HttpNfcLease" [ 2099.330781] env[62875]: } obtained for exporting VM: (result){ [ 2099.330781] env[62875]: value = "vm-444931" [ 2099.330781] env[62875]: _type = "VirtualMachine" [ 2099.330781] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2099.331000] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the lease: (returnval){ [ 2099.331000] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e17330-3d4b-aa73-385c-461db6e3fd43" [ 2099.331000] env[62875]: _type = "HttpNfcLease" [ 2099.331000] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2099.337930] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2099.337930] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e17330-3d4b-aa73-385c-461db6e3fd43" [ 2099.337930] env[62875]: _type = "HttpNfcLease" [ 2099.337930] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2099.408214] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180182, 'name': Rename_Task, 'duration_secs': 0.151626} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.408493] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2099.408728] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49655a80-4b79-4fc1-97ed-97b5b0bc4da2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.415282] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2099.415282] env[62875]: value = "task-2180185" [ 2099.415282] env[62875]: _type = "Task" [ 2099.415282] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.423026] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180185, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.466353] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180183, 'name': ReconfigVM_Task, 'duration_secs': 0.279168} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.466593] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d/737c68b0-9ccf-4e0b-a46d-aa78f7981c3d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2099.467214] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0722474f-5c32-4a24-910c-092cac0cf3cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.473623] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2099.473623] env[62875]: value = "task-2180186" [ 2099.473623] env[62875]: _type = "Task" [ 2099.473623] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.481638] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180186, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.496364] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eb4e4f-434b-1e75-37b1-c976fd845f0d, 'name': SearchDatastore_Task, 'duration_secs': 0.015006} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.496636] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2099.496947] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2099.497255] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e6773da-c644-441c-8be9-70ac763bc12d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.503826] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2099.503826] env[62875]: value = "task-2180187" [ 2099.503826] env[62875]: _type = "Task" [ 2099.503826] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.512402] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.682675] env[62875]: DEBUG nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2099.704214] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2099.704524] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2099.704704] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2099.704902] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2099.705065] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2099.705217] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2099.705426] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2099.705585] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2099.705751] env[62875]: DEBUG 
nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2099.705911] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2099.706101] env[62875]: DEBUG nova.virt.hardware [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2099.707008] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1ee0f1-50d3-43bd-ae98-65d2c7911c83 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.718214] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd22d61e-3a81-4762-92fa-5b8805dd73e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.841750] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2099.841750] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e17330-3d4b-aa73-385c-461db6e3fd43" [ 2099.841750] env[62875]: _type = "HttpNfcLease" [ 2099.841750] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2099.842053] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2099.842053] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e17330-3d4b-aa73-385c-461db6e3fd43" [ 2099.842053] env[62875]: _type = "HttpNfcLease" [ 2099.842053] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2099.842873] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08987f8-6c75-4a49-b806-e4416ddc405d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.853052] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52818304-5eae-923e-7603-a7b1e8adcb93/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2099.853766] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52818304-5eae-923e-7603-a7b1e8adcb93/disk-0.vmdk for reading. 
{{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2099.928774] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180185, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.959645] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7981722e-967c-4cfe-bb11-65eb2cabe292 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.989303] env[62875]: DEBUG nova.compute.manager [req-d19c5b0d-4539-4c55-a139-35914d79eb19 req-9d7121a0-abda-4e24-b24c-981719b8b68d service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Received event network-vif-plugged-55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2099.989475] env[62875]: DEBUG oslo_concurrency.lockutils [req-d19c5b0d-4539-4c55-a139-35914d79eb19 req-9d7121a0-abda-4e24-b24c-981719b8b68d service nova] Acquiring lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.989681] env[62875]: DEBUG oslo_concurrency.lockutils [req-d19c5b0d-4539-4c55-a139-35914d79eb19 req-9d7121a0-abda-4e24-b24c-981719b8b68d service nova] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.989849] env[62875]: DEBUG oslo_concurrency.lockutils [req-d19c5b0d-4539-4c55-a139-35914d79eb19 req-9d7121a0-abda-4e24-b24c-981719b8b68d service nova] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.990039] env[62875]: DEBUG nova.compute.manager [req-d19c5b0d-4539-4c55-a139-35914d79eb19 req-9d7121a0-abda-4e24-b24c-981719b8b68d service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] No waiting events found dispatching network-vif-plugged-55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2099.990365] env[62875]: WARNING nova.compute.manager [req-d19c5b0d-4539-4c55-a139-35914d79eb19 req-9d7121a0-abda-4e24-b24c-981719b8b68d service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Received unexpected event network-vif-plugged-55ce29a2-111a-4739-a7fc-ffa36ff6fa70 for instance with vm_state building and task_state spawning. [ 2100.000879] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180186, 'name': Rename_Task, 'duration_secs': 0.140886} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.003799] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2100.005414] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fa1f305-e9fd-4c9b-bd1b-0c304dc98fe0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.014794] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2100.014794] env[62875]: value = "task-2180188" [ 2100.014794] env[62875]: _type = "Task" [ 2100.014794] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.017956] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180187, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.029291] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180188, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.037104] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9fadf5-93a0-4d76-b948-bd0b116f9768 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.045288] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cf4131-57a0-4aa7-b213-49b804eaeda4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.080194] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f451c8e0-5141-4831-acba-1813859a1a1c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.089704] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a5976b-0204-417f-987f-e3e87f19325b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.104911] env[62875]: DEBUG nova.compute.provider_tree [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2100.211321] env[62875]: DEBUG nova.network.neutron [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Successfully updated port: 55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2100.428379] env[62875]: DEBUG oslo_vmware.api [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180185, 'name': PowerOnVM_Task, 'duration_secs': 0.845906} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.428833] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2100.429179] env[62875]: INFO nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Took 7.91 seconds to spawn the instance on the hypervisor. 
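Editor's note: the PowerOnVM_Task records above show oslo.vmware's invoke/poll pattern end to end — a SOAP method that returns a Task moref is invoked through the session, then polled (the "progress is N%" records) until it completes or raises. A minimal sketch using only oslo.vmware's public API; the endpoint and credentials are hypothetical, and the poll interval merely mirrors the roughly half-second cadence visible in the timestamps above:

```python
from oslo_vmware import api

# Hypothetical vCenter endpoint and credentials; retry/poll settings are
# illustrative, chosen to match the polling cadence seen in these records.
session = api.VMwareAPISession('vc1.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_on(session, vm_ref):
    # invoke_api issues the SOAP call against the vim service; the call
    # returns a Task managed-object reference immediately.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the Task moref until it reaches a terminal state,
    # raising on task error -- the source of the _poll_task records above.
    session.wait_for_task(task)
```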
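Editor's note: the HttpNfcLease sequence further up (lease obtained for exporting the VM, polled while "initializing" until "ready", VMDK URL read from the lease info) rides on the same session API. A sketch under the assumption that `session` is an established oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine moref; the helper name is made up, and the URL selection approximates what the "Found VMDK URL" record reports:

```python
from oslo_vmware import vim_util

def export_vmdk_url(session, vm_ref):
    """Return the VMDK download URL for an export lease on vm_ref.

    session: an established oslo_vmware.api.VMwareAPISession
    vm_ref:  a VirtualMachine managed-object reference
    """
    # ExportVm returns an HttpNfcLease moref; the lease starts "initializing".
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)  # polls until the lease is ready
    # Read the lease's 'info' property, as the records above do before
    # extracting the disk URL from its deviceUrl list.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    return next(u.url for u in lease_info.deviceUrl
                if u.url.endswith('.vmdk'))
```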
[ 2100.429771] env[62875]: DEBUG nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2100.430915] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3687a9d-4e7f-4e5f-806e-9072b7566a4e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.518581] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.797606} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.522346] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2100.522346] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2100.522695] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8e83324-de04-4bfd-b037-8d4f0543b9f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.530926] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180188, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.532464] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2100.532464] env[62875]: value = "task-2180189" [ 2100.532464] env[62875]: _type = "Task" [ 2100.532464] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.541501] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180189, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.612399] env[62875]: DEBUG nova.scheduler.client.report [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2100.714294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2100.714498] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2100.715538] env[62875]: DEBUG nova.network.neutron [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2100.952239] env[62875]: INFO nova.compute.manager [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Took 35.68 seconds to build instance. [ 2101.028499] env[62875]: DEBUG oslo_vmware.api [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180188, 'name': PowerOnVM_Task, 'duration_secs': 0.620405} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.028780] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2101.029076] env[62875]: INFO nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Took 6.08 seconds to spawn the instance on the hypervisor. 
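Editor's note: the "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... released ... held N.NNNs" triplets throughout these records come from oslo.concurrency's named locks; the inner() wrapper cited in each record emits the wait/hold timings. A minimal sketch of the two usual forms — the decorated function is illustrative, and the lock names simply reuse shapes seen above:

```python
from oslo_concurrency import lockutils

# Decorator form: the body runs only while the named lock is held, and the
# wrapper logs the acquire/release records with waited/held durations.
@lockutils.synchronized('compute_resources')
def claim_resources():
    ...  # e.g. resource-tracker claim/update, as in the records above

# Context-manager form around a network-info refresh; the
# "refresh_cache-<uuid>" name follows the pattern in these records.
with lockutils.lock('refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286'):
    pass  # rebuild the instance's network-info cache while holding the lock
```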
[ 2101.029343] env[62875]: DEBUG nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2101.030264] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f4fe62-eb0a-49b5-b048-ee4ad67baf95 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.046134] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180189, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104464} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.046424] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2101.047327] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a40a2f5-05c6-4eff-af7e-bd07fc4f0ceb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.068998] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2101.069363] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d35937f-7e7a-446a-b28a-7e0adb510a24 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.091843] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2101.091843] env[62875]: value = "task-2180190" [ 2101.091843] env[62875]: _type = "Task" [ 2101.091843] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.100952] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180190, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.118034] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.118436] env[62875]: DEBUG nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2101.121400] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.985s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.121607] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.124615] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.676s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.126067] env[62875]: INFO nova.compute.claims [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2101.159731] env[62875]: INFO nova.scheduler.client.report [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Deleted allocations for instance 2cf54268-5499-49c9-8029-68b3866581d0 [ 2101.268232] env[62875]: DEBUG nova.network.neutron [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2101.446273] env[62875]: DEBUG nova.network.neutron [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updating instance_info_cache with network_info: [{"id": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "address": "fa:16:3e:94:22:ba", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ce29a2-11", "ovs_interfaceid": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2101.454041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed784fae-964c-4cd1-ac6d-9355c38687d8 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.088s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.551079] env[62875]: INFO nova.compute.manager [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Took 32.90 seconds to build instance. [ 2101.603600] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180190, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.631485] env[62875]: DEBUG nova.compute.utils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2101.636350] env[62875]: DEBUG nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2101.636532] env[62875]: DEBUG nova.network.neutron [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2101.670849] env[62875]: DEBUG oslo_concurrency.lockutils [None req-879e8ac1-5c9e-48f8-bcd4-89b26d5c3e91 tempest-AttachInterfacesV270Test-1492586754 tempest-AttachInterfacesV270Test-1492586754-project-member] Lock "2cf54268-5499-49c9-8029-68b3866581d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.176s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.907706] env[62875]: DEBUG nova.policy [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b83dfb1443184b228f071a5a430a27f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8c6003356bb4c6091c3eaa536e78032', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2101.952500] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Releasing lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2101.953035] env[62875]: DEBUG nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Instance network_info: |[{"id": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "address": "fa:16:3e:94:22:ba", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ce29a2-11", "ovs_interfaceid": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2101.954369] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:22:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a64108f9-df0a-4feb-bbb5-97f5841c356c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55ce29a2-111a-4739-a7fc-ffa36ff6fa70', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2101.964567] env[62875]: DEBUG oslo.service.loopingcall [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2101.964933] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2101.965520] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07c66e17-a88c-48b2-a2e6-e8415eb4293b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.992353] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2101.992353] env[62875]: value = "task-2180191" [ 2101.992353] env[62875]: _type = "Task" [ 2101.992353] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.007279] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180191, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.020618] env[62875]: DEBUG nova.compute.manager [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Received event network-changed-76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2102.020892] env[62875]: DEBUG nova.compute.manager [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Refreshing instance network info cache due to event network-changed-76f0c221-d292-49e7-9f24-d43fa768395b. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2102.021377] env[62875]: DEBUG oslo_concurrency.lockutils [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] Acquiring lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.021794] env[62875]: DEBUG oslo_concurrency.lockutils [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] Acquired lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.022226] env[62875]: DEBUG nova.network.neutron [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Refreshing network info cache for port 76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2102.040658] env[62875]: DEBUG nova.compute.manager [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Received event network-changed-55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2102.040871] env[62875]: DEBUG nova.compute.manager [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Refreshing instance network info cache due to event network-changed-55ce29a2-111a-4739-a7fc-ffa36ff6fa70. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2102.041250] env[62875]: DEBUG oslo_concurrency.lockutils [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] Acquiring lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.041766] env[62875]: DEBUG oslo_concurrency.lockutils [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] Acquired lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.041766] env[62875]: DEBUG nova.network.neutron [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Refreshing network info cache for port 55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2102.053156] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4c2fd591-8243-4299-9262-cb66168e7471 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.613s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.104158] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180190, 
'name': ReconfigVM_Task, 'duration_secs': 0.676954} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.104437] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2102.105082] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2b8cfc2-65a4-4949-9fc5-6f0247e4d4ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.112214] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2102.112214] env[62875]: value = "task-2180192" [ 2102.112214] env[62875]: _type = "Task" [ 2102.112214] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.121410] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180192, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.146595] env[62875]: DEBUG nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2102.504016] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9970bd-b09a-4905-9446-dae66aa7058e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.514547] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180191, 'name': CreateVM_Task, 'duration_secs': 0.440661} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.514952] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2102.516054] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9cbb8e-5a9d-4ffc-a933-11c3ad5be119 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.521630] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.521946] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.524152] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2102.524152] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d355e5a2-18a5-4ac6-926b-36518e3cb304 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.560175] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de09863-14d0-4afe-9696-01456db0749d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.563529] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2102.563529] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521d9d46-238d-3b0a-9577-58da816048bf" [ 2102.563529] env[62875]: _type = "Task" [ 2102.563529] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.574578] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc756c50-c622-4f3c-942a-66ef82db62c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.582233] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521d9d46-238d-3b0a-9577-58da816048bf, 'name': SearchDatastore_Task, 'duration_secs': 0.016676} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.583169] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2102.583539] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2102.583886] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.584165] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.584465] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2102.592606] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-649cf758-8ee5-43f8-80f6-4c7be802c9df {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.595195] env[62875]: DEBUG nova.compute.provider_tree [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2102.602573] env[62875]: DEBUG nova.network.neutron [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Successfully created port: edc7469f-1104-497d-b8c6-1404fc3223ca {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2102.608454] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2102.608454] 
env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2102.611029] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afde1b9c-5c0e-4844-a6ff-e675cc820cf6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.617735] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2102.617735] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520f1426-b4e2-4259-5dc3-68d9be95e73e" [ 2102.617735] env[62875]: _type = "Task" [ 2102.617735] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.624243] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180192, 'name': Rename_Task, 'duration_secs': 0.185853} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.624697] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2102.624931] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c844510-9f34-4016-88d5-3c02e232d770 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.630166] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520f1426-b4e2-4259-5dc3-68d9be95e73e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.636203] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2102.636203] env[62875]: value = "task-2180193" [ 2102.636203] env[62875]: _type = "Task" [ 2102.636203] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.649282] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180193, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.825564] env[62875]: DEBUG nova.network.neutron [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updated VIF entry in instance network info cache for port 55ce29a2-111a-4739-a7fc-ffa36ff6fa70. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2102.826070] env[62875]: DEBUG nova.network.neutron [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updating instance_info_cache with network_info: [{"id": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "address": "fa:16:3e:94:22:ba", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ce29a2-11", "ovs_interfaceid": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.923990] env[62875]: DEBUG nova.compute.manager [None req-ff76302d-03df-472b-b153-9e16ea6f03fc tempest-ServerDiagnosticsV248Test-608639203 tempest-ServerDiagnosticsV248Test-608639203-project-admin] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2102.925596] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4412bda5-0c40-4864-bf20-9be6e887f4b3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.933454] env[62875]: INFO nova.compute.manager [None req-ff76302d-03df-472b-b153-9e16ea6f03fc tempest-ServerDiagnosticsV248Test-608639203 tempest-ServerDiagnosticsV248Test-608639203-project-admin] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Retrieving diagnostics [ 2102.934277] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f9a1ad-ced0-4b53-ad41-d9bb195fc393 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.102222] env[62875]: DEBUG nova.scheduler.client.report [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2103.129638] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520f1426-b4e2-4259-5dc3-68d9be95e73e, 'name': SearchDatastore_Task, 'duration_secs': 0.013963} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.130553] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21cb5cc0-7705-4a64-a2c6-9947f3191daa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.142697] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2103.142697] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ca5606-0834-fd21-fbb3-7633493a9086" [ 2103.142697] env[62875]: _type = "Task" [ 2103.142697] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.149949] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180193, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.156011] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ca5606-0834-fd21-fbb3-7633493a9086, 'name': SearchDatastore_Task, 'duration_secs': 0.014258} completed successfully. 
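The inventory blob logged above follows placement's capacity arithmetic: usable capacity per resource class is (total - reserved) * allocation_ratio, allocated in step_size increments between min_unit and max_unit. Re-deriving the headline numbers from the logged data (plain Python, no OpenStack imports):

```python
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(inv):
    # (total - reserved) * allocation_ratio, per resource class.
    return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```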
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.156366] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2103.156683] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286/76a058aa-9fdf-4a3d-9d1b-a50bb9f61286.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2103.157075] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2338cf38-2360-4cd3-a188-39458ad981e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.160212] env[62875]: DEBUG nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2103.169439] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2103.169439] env[62875]: value = "task-2180194" [ 2103.169439] env[62875]: _type = "Task" [ 2103.169439] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.188463] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.191322] env[62875]: DEBUG nova.network.neutron [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Updated VIF entry in instance network info cache for port 76f0c221-d292-49e7-9f24-d43fa768395b. 
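The instance_info_cache entries in this stretch (e.g. the one for port 55ce29a2-111a-4739-a7fc-ffa36ff6fa70 above) are plain JSON-serializable dicts. A small helper that pulls the addressing out of one VIF entry, using only keys visible in the logged blob; summarize_vif is illustrative, not part of nova.network:

```python
def summarize_vif(vif: dict) -> dict:
    subnet = vif["network"]["subnets"][0]
    fixed = subnet["ips"][0]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ip": fixed["address"],
        "floating_ips": [f["address"] for f in fixed["floating_ips"]],
        "mtu": vif["network"]["meta"]["mtu"],
    }

# For the 55ce29a2 entry above this yields mac fa:16:3e:94:22:ba,
# fixed_ip 192.168.128.7, no floating IPs, and mtu 8950.
```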
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2103.191738] env[62875]: DEBUG nova.network.neutron [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Updating instance_info_cache with network_info: [{"id": "76f0c221-d292-49e7-9f24-d43fa768395b", "address": "fa:16:3e:84:0b:36", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f0c221-d2", "ovs_interfaceid": "76f0c221-d292-49e7-9f24-d43fa768395b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.200462] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2103.200723] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2103.200880] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2103.201074] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 
tempest-ImagesNegativeTestJSON-488942989-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2103.201314] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2103.201451] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2103.201575] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2103.201813] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2103.202000] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2103.202201] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2103.202376] env[62875]: DEBUG nova.virt.hardware [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2103.203288] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabcf4b6-1be1-4afd-b1d4-f1990f562a80 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.212833] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ecdb13-0f6c-40cb-95d9-647a1292bd36 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.328950] env[62875]: DEBUG oslo_concurrency.lockutils [req-e92ac57d-cd3a-4e3b-9663-5b68bdf8c232 req-00039445-3ebd-4ec9-bff5-fe66f02251fe service nova] Releasing lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2103.607470] env[62875]: DEBUG oslo_concurrency.lockutils [None 
req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.607967] env[62875]: DEBUG nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2103.611606] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.577s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.611812] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.613984] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.279s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.614241] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.616787] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.236s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.618460] env[62875]: INFO nova.compute.claims [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2103.650871] env[62875]: DEBUG oslo_vmware.api [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180193, 'name': PowerOnVM_Task, 'duration_secs': 0.579806} completed successfully. 
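A few entries back the driver enumerates guest CPU topologies ("Build topologies for 1 vcpu(s) 1:1:1" through "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"). The walk is a factorization of the vCPU count into sockets x cores x threads under the 65536 limits; a compact re-creation of that enumeration, loosely modeled on the log rather than copied from nova/virt/hardware.py:

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Every (sockets, cores, threads) triple whose product is vcpus
    # and whose members respect the per-dimension limits.
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append((s, c, t))
    return topos

print(possible_topologies(1))  # [(1, 1, 1)] -- "Got 1 possible topologies"
```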
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.651697] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2103.652735] env[62875]: INFO nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Took 6.32 seconds to spawn the instance on the hypervisor. [ 2103.652735] env[62875]: DEBUG nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2103.654654] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5650ba-2299-4eb1-a5ac-c6db5004251c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.659760] env[62875]: INFO nova.scheduler.client.report [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Deleted allocations for instance 2106a09b-554e-41dd-aa3a-c190b62d0afc [ 2103.662885] env[62875]: INFO nova.scheduler.client.report [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted allocations for instance 8361611a-ad16-43ef-94e0-f2e7e9851682 [ 2103.691305] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180194, 'name': CopyVirtualDisk_Task} progress is 25%. 
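The Task/progress lines throughout this log come from oslo.vmware polling a vSphere task until it settles (0% -> 89% -> "completed successfully" for task-2180193 just above). A self-contained sketch of that polling loop; poll stands in for a TaskInfo read and the replayed states are illustrative:

```python
import time

class TaskFailed(Exception):
    pass

def wait_for_task(poll, interval=0.5):
    # Poll until the task reports success or error, logging progress in
    # between -- the same rhythm as the _poll_task lines in this log.
    while True:
        state, progress, error = poll()
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(error)
        print(f"progress is {progress}%")
        time.sleep(interval)

# Replaying the PowerOnVM_Task trace above: 0% -> 89% -> success.
states = iter([("running", 0, None), ("running", 89, None),
               ("success", 100, None)])
wait_for_task(lambda: next(states), interval=0)
```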
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.695064] env[62875]: DEBUG oslo_concurrency.lockutils [req-b0abcc97-b0c1-48c0-a4e0-efe0ea3b4329 req-7a851285-0963-4baf-8239-65d514015feb service nova] Releasing lock "refresh_cache-45403db3-ff20-42d3-8a37-8db671d8c1fa" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2104.059023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "1230e54c-701a-4406-95bd-14e32914bc8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.059023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "1230e54c-701a-4406-95bd-14e32914bc8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.059023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "1230e54c-701a-4406-95bd-14e32914bc8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.059023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "1230e54c-701a-4406-95bd-14e32914bc8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.059023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "1230e54c-701a-4406-95bd-14e32914bc8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.060205] env[62875]: INFO nova.compute.manager [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Terminating instance [ 2104.122948] env[62875]: DEBUG nova.compute.utils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2104.124456] env[62875]: DEBUG nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 
tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2104.124633] env[62875]: DEBUG nova.network.neutron [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2104.175532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cb564ef-0d1b-40be-aca9-1a7917563d48 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "2106a09b-554e-41dd-aa3a-c190b62d0afc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.340s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.184733] env[62875]: DEBUG oslo_concurrency.lockutils [None req-14eb6195-6a3d-4f15-8b51-81039d5b51a8 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "8361611a-ad16-43ef-94e0-f2e7e9851682" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.758s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.196668] env[62875]: DEBUG nova.policy [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e818b6d10af42bb9c86e79ae93de507', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7523e34b88d84ec1ae28221d8d1a3591', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2104.202356] env[62875]: INFO nova.compute.manager [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Took 32.44 seconds to build instance. [ 2104.207779] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747966} completed successfully. 
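The policy denial above is expected noise: Nova probes network:attach_external_network, which by default only admin credentials pass, and this request carries only the member and reader roles, so external networks are simply excluded for it. A toy evaluation of that rule shape (the real check goes through oslo.policy):

```python
def passes_admin_only_rule(creds: dict) -> bool:
    # Toy stand-in for the default admin-only policy rule.
    return "admin" in creds.get("roles", [])

creds = {"roles": ["member", "reader"],
         "project_id": "7523e34b88d84ec1ae28221d8d1a3591"}
assert not passes_admin_only_rule(creds)  # hence the logged failure
```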
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.208073] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286/76a058aa-9fdf-4a3d-9d1b-a50bb9f61286.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2104.208400] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2104.208624] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9ac091c-d22a-46a5-a465-606b9f4831ee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.217580] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2104.217580] env[62875]: value = "task-2180195" [ 2104.217580] env[62875]: _type = "Task" [ 2104.217580] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.230662] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180195, 'name': ExtendVirtualDisk_Task} progress is 0%. 
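The "Extending root virtual disk to 1048576" entry is the flavor's root_gb converted to KiB, since vSphere's ExtendVirtualDisk_Task takes its target capacity in KB; with m1.nano's root_gb=1 that is 1 * 1024 * 1024:

```python
def root_capacity_kb(root_gb: int) -> int:
    # GiB -> KiB, the unit ExtendVirtualDisk_Task expects.
    return root_gb * 1024 * 1024

assert root_capacity_kb(1) == 1048576  # matches the logged target
```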
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.569653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "refresh_cache-1230e54c-701a-4406-95bd-14e32914bc8d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.569653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquired lock "refresh_cache-1230e54c-701a-4406-95bd-14e32914bc8d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.569653] env[62875]: DEBUG nova.network.neutron [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2104.630069] env[62875]: DEBUG nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2104.705160] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1ad569d-8931-4d4d-9429-0f47df1c780d tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "a19f5bee-ece8-4aa3-8c33-9474da385238" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.931s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.737021] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063485} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.737415] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2104.738747] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769127a7-a3bf-4700-bc8a-2123e6509ba9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.761957] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286/76a058aa-9fdf-4a3d-9d1b-a50bb9f61286.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2104.765443] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60e99772-53fe-455d-8b4a-8932a02aa196 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.788560] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2104.788560] env[62875]: value = "task-2180196" [ 2104.788560] env[62875]: _type = "Task" [ 2104.788560] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.799155] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180196, 'name': ReconfigVM_Task} progress is 6%. 
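The ReconfigVM_Task above attaches the freshly copied and extended VMDK to the instance. The call is driven by a config spec whose deviceChange adds a disk backed by that file; the sketch below uses plain dicts in place of the SOAP types, and the field values marked as assumed are not in the log:

```python
# Plain-dict approximation of the reconfigure payload; the real driver
# builds typed SOAP objects for VirtualMachine.ReconfigVM_Task.
attach_disk_spec = {
    "deviceChange": [{
        "operation": "add",
        "device": {
            "backing": {
                "fileName": "[datastore1] 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286/"
                            "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286.vmdk",
                "diskMode": "persistent",  # assumed mode, not in the log
            },
            "controllerKey": 1000,  # assumed SCSI controller key
            "unitNumber": 0,        # assumed slot
        },
    }],
}
```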
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.997844] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fddca3e-0b0b-4f33-9b65-b6c51c3e9646 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.006501] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bb2589-a704-4132-a13e-c388d0283273 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.051287] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2069708d-80a6-42aa-aae8-5e34033e9368 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.060070] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f2fda2-590d-4545-ac60-db976bd47579 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.077081] env[62875]: DEBUG nova.compute.provider_tree [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2105.080079] env[62875]: DEBUG nova.network.neutron [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Successfully created port: 2a8a41b9-756a-4ad7-b2f3-3a05d58a308d {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2105.109040] env[62875]: DEBUG nova.network.neutron [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2105.130795] env[62875]: DEBUG nova.compute.manager [req-06115eb2-a7e0-4464-a3a5-583c33c91821 req-22db6fdd-52a0-4ee3-8883-07b37330ca39 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Received event network-vif-plugged-edc7469f-1104-497d-b8c6-1404fc3223ca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2105.131091] env[62875]: DEBUG oslo_concurrency.lockutils [req-06115eb2-a7e0-4464-a3a5-583c33c91821 req-22db6fdd-52a0-4ee3-8883-07b37330ca39 service nova] Acquiring lock "e811f624-2dda-468c-ab28-9744c300eb1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.131437] env[62875]: DEBUG oslo_concurrency.lockutils [req-06115eb2-a7e0-4464-a3a5-583c33c91821 req-22db6fdd-52a0-4ee3-8883-07b37330ca39 service nova] Lock "e811f624-2dda-468c-ab28-9744c300eb1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.131644] env[62875]: DEBUG oslo_concurrency.lockutils [req-06115eb2-a7e0-4464-a3a5-583c33c91821 req-22db6fdd-52a0-4ee3-8883-07b37330ca39 service nova] Lock "e811f624-2dda-468c-ab28-9744c300eb1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.131909] env[62875]: DEBUG nova.compute.manager [req-06115eb2-a7e0-4464-a3a5-583c33c91821 req-22db6fdd-52a0-4ee3-8883-07b37330ca39 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] No waiting events found dispatching network-vif-plugged-edc7469f-1104-497d-b8c6-1404fc3223ca {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2105.132135] env[62875]: WARNING nova.compute.manager [req-06115eb2-a7e0-4464-a3a5-583c33c91821 req-22db6fdd-52a0-4ee3-8883-07b37330ca39 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Received unexpected event network-vif-plugged-edc7469f-1104-497d-b8c6-1404fc3223ca for instance with vm_state building and task_state spawning. [ 2105.207219] env[62875]: DEBUG nova.network.neutron [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2105.304256] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180196, 'name': ReconfigVM_Task} progress is 14%. 
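The network-vif-plugged handling above is the external-event handshake: compute registers interest in an event tag before plugging a VIF, and the Neutron-triggered notification either completes a waiter or, as in the WARNING here, arrives while the instance is still building with nobody registered. A purely illustrative waiter keyed the same way:

```python
import threading
from collections import defaultdict

_waiters: dict[str, threading.Event] = defaultdict(threading.Event)
_lock = threading.Lock()  # mirrors the "<uuid>-events" lock above

def prepare_for_event(tag: str) -> threading.Event:
    # Called before plugging the VIF: register interest in the tag.
    with _lock:
        return _waiters[tag]

def pop_instance_event(tag: str) -> bool:
    # Called when Neutron's notification arrives via the API.
    with _lock:
        event = _waiters.pop(tag, None)
    if event is None:
        print(f"WARNING: unexpected event {tag}")  # nobody was waiting
        return False
    event.set()
    return True
```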
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.587173] env[62875]: DEBUG nova.scheduler.client.report [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2105.594623] env[62875]: DEBUG nova.network.neutron [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Successfully updated port: edc7469f-1104-497d-b8c6-1404fc3223ca {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2105.646243] env[62875]: DEBUG nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2105.673578] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2105.673859] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2105.674038] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2105.674227] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] 
Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2105.674380] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2105.674526] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2105.674731] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2105.674892] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2105.675164] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2105.675301] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2105.675481] env[62875]: DEBUG nova.virt.hardware [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2105.676381] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd77517-893d-4f3e-b197-3f3f798902a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.685536] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56c5371-771c-483c-b506-13bb7a0d223a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.709687] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Releasing lock "refresh_cache-1230e54c-701a-4406-95bd-14e32914bc8d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.710208] env[62875]: DEBUG 
nova.compute.manager [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2105.710431] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2105.711313] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2741ced9-039e-4f57-b9c6-ccfd1c5426ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.719276] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2105.719559] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6879917-b7c4-4aa5-9b9a-45dc37d8c60c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.727034] env[62875]: DEBUG oslo_vmware.api [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2105.727034] env[62875]: value = "task-2180197" [ 2105.727034] env[62875]: _type = "Task" [ 2105.727034] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.736387] env[62875]: DEBUG oslo_vmware.api [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180197, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.747981] env[62875]: DEBUG nova.compute.manager [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Received event network-changed-edc7469f-1104-497d-b8c6-1404fc3223ca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2105.748352] env[62875]: DEBUG nova.compute.manager [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Refreshing instance network info cache due to event network-changed-edc7469f-1104-497d-b8c6-1404fc3223ca. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2105.748698] env[62875]: DEBUG oslo_concurrency.lockutils [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] Acquiring lock "refresh_cache-e811f624-2dda-468c-ab28-9744c300eb1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.748929] env[62875]: DEBUG oslo_concurrency.lockutils [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] Acquired lock "refresh_cache-e811f624-2dda-468c-ab28-9744c300eb1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.749162] env[62875]: DEBUG nova.network.neutron [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Refreshing network info cache for port edc7469f-1104-497d-b8c6-1404fc3223ca {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2105.798281] env[62875]: INFO nova.compute.manager [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Rebuilding instance [ 2105.808180] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180196, 'name': ReconfigVM_Task, 'duration_secs': 0.763637} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.808180] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286/76a058aa-9fdf-4a3d-9d1b-a50bb9f61286.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2105.808180] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54e29524-0b1b-413b-b7e0-b7b949a51a0f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.813924] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2105.813924] env[62875]: value = "task-2180198" [ 2105.813924] env[62875]: _type = "Task" [ 2105.813924] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.829575] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180198, 'name': Rename_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.862396] env[62875]: DEBUG nova.compute.manager [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2105.863388] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a3d707-2eca-4462-8301-180c22c07770 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.096690] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.097245] env[62875]: DEBUG nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2106.100057] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "refresh_cache-e811f624-2dda-468c-ab28-9744c300eb1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.100379] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.070s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.101931] env[62875]: INFO nova.compute.claims [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2106.241126] env[62875]: DEBUG oslo_vmware.api [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180197, 'name': PowerOffVM_Task, 'duration_secs': 0.192654} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.241395] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2106.241433] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2106.241702] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6210c6d-fc9e-4ef3-88e0-e043cbba0344 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.266574] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2106.266823] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2106.267026] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Deleting the datastore file [datastore2] 1230e54c-701a-4406-95bd-14e32914bc8d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2106.267313] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cb99c3f-a853-4cce-aa7f-029d0e2725f3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.275629] env[62875]: DEBUG oslo_vmware.api [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for the task: (returnval){ [ 2106.275629] env[62875]: value = "task-2180200" [ 2106.275629] env[62875]: _type = "Task" [ 2106.275629] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.284265] env[62875]: DEBUG oslo_vmware.api [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180200, 'name': DeleteDatastoreFile_Task} progress is 0%. 
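Teardown of 1230e54c-701a-4406-95bd-14e32914bc8d above follows a strict order: power the VM off, unregister it, then delete its directory from datastore2 (note UnregisterVM is the one call with no task to poll). A linear sketch with print stubs standing in for the corresponding vSphere calls:

```python
def power_off_vm(ref):
    print(f"PowerOffVM_Task({ref})")            # must complete first

def unregister_vm(ref):
    print(f"UnregisterVM({ref})")               # no task to poll here

def delete_datastore_dir(path):
    print(f"DeleteDatastoreFile_Task({path})")

def destroy(vm_ref, ds_path):
    # Same order as the log: off -> out of inventory -> files removed.
    power_off_vm(vm_ref)
    unregister_vm(vm_ref)
    delete_datastore_dir(ds_path)

destroy("vm-1230e54c", "[datastore2] 1230e54c-701a-4406-95bd-14e32914bc8d")
```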
[ 2106.312347] env[62875]: DEBUG nova.network.neutron [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2106.323876] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180198, 'name': Rename_Task, 'duration_secs': 0.20185} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2106.324198] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2106.324483] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a9aeac0-654a-4470-ac40-bffae9a8a055 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2106.331043] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){
[ 2106.331043] env[62875]: value = "task-2180201"
[ 2106.331043] env[62875]: _type = "Task"
[ 2106.331043] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2106.340946] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180201, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2106.504815] env[62875]: DEBUG nova.network.neutron [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2106.606304] env[62875]: DEBUG nova.compute.utils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2106.609696] env[62875]: DEBUG nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2106.609696] env[62875]: DEBUG nova.network.neutron [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2106.706444] env[62875]: DEBUG nova.policy [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991b3033e865471e983d8f605d1d690b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82e42c29c6dd480c87096bea1977074d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 2106.787043] env[62875]: DEBUG oslo_vmware.api [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Task: {'id': task-2180200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400726} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2106.787494] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2106.788162] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2106.788162] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2106.788499] env[62875]: INFO nova.compute.manager [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Took 1.08 seconds to destroy the instance on the hypervisor.
[ 2106.790514] env[62875]: DEBUG oslo.service.loopingcall [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2106.790514] env[62875]: DEBUG nova.compute.manager [-] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2106.790514] env[62875]: DEBUG nova.network.neutron [-] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2106.815369] env[62875]: DEBUG nova.network.neutron [-] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2106.842747] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180201, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2106.879457] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 2106.879710] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27a2df30-bfaa-4b2b-a007-5c170df02faa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2106.886597] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){
[ 2106.886597] env[62875]: value = "task-2180202"
[ 2106.886597] env[62875]: _type = "Task"
[ 2106.886597] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2106.897133] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180202, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
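The "Waiting for function ... _deallocate_network_with_retries to return" record above comes from oslo.service's looping-call machinery, which re-invokes a function on an interval until it signals completion. A generic FixedIntervalLoopingCall sketch of that mechanism (not nova's exact retry helper; the attempt counter is invented for illustration):

# Generic oslo.service looping-call sketch; the function runs every
# `interval` seconds until it raises LoopingCallDone with a return value.
from oslo_service import loopingcall

attempts = {'n': 0}

def _deallocate():                 # stand-in for the retried body
    attempts['n'] += 1
    if attempts['n'] < 3:          # pretend the first tries do not finish
        return                     # the loop fires again after `interval`
    raise loopingcall.LoopingCallDone(True)  # stop the loop, return a value

timer = loopingcall.FixedIntervalLoopingCall(_deallocate)
result = timer.start(interval=2).wait()  # blocks until LoopingCallDone
print(result)                            # True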
[ 2106.941198] env[62875]: DEBUG nova.network.neutron [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Successfully updated port: 2a8a41b9-756a-4ad7-b2f3-3a05d58a308d {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2107.008063] env[62875]: DEBUG oslo_concurrency.lockutils [req-67607a61-b053-4010-8fa0-b6f179307ca2 req-ad04e9ab-2849-4dca-8c61-4e915f8f0468 service nova] Releasing lock "refresh_cache-e811f624-2dda-468c-ab28-9744c300eb1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2107.008637] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquired lock "refresh_cache-e811f624-2dda-468c-ab28-9744c300eb1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2107.008863] env[62875]: DEBUG nova.network.neutron [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2107.115905] env[62875]: DEBUG nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2107.253428] env[62875]: DEBUG nova.network.neutron [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Successfully created port: 3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2107.307288] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "9dd30ca8-bf15-4a87-b055-3575445f4b79" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2107.307597] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2107.321273] env[62875]: DEBUG nova.network.neutron [-] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2107.347545] env[62875]: DEBUG oslo_vmware.api [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180201, 'name': PowerOnVM_Task, 'duration_secs': 0.760571} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2107.347545] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2107.347545] env[62875]: INFO nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Took 7.66 seconds to spawn the instance on the hypervisor.
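The Acquiring/acquired/released records with "waited"/"held" timings are oslo.concurrency named locks; nova serializes whole claim and cache-refresh sections on them. A stand-alone sketch of the two forms seen here (nova reaches these through its own wrappers, but the primitives are the same):

# oslo.concurrency named-lock sketch; both forms emit acquire/release
# records with waited/held timings like the ones in this log.
from oslo_concurrency import lockutils

# Decorator form: the function body runs with the named lock held.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # resource-tracker bookkeeping would happen here

# Context-manager form, as used for the refresh_cache-<instance-uuid> locks.
with lockutils.lock('refresh_cache-e811f624-2dda-468c-ab28-9744c300eb1d'):
    pass  # rebuild the instance's network-info cache under the lock

claim_resources()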
[ 2107.347545] env[62875]: DEBUG nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2107.347545] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63ca68e-c65b-40b6-af5b-0e2a0712e31c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.401919] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180202, 'name': PowerOffVM_Task, 'duration_secs': 0.169569} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2107.402239] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2107.402477] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2107.403679] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4001d2e9-fac1-4b03-9ad8-f7a756fd5099 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.412792] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2107.415366] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b892a32-a7e3-435f-8bdf-1ad04490d60c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.444037] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-44a248f6-443c-4b7c-95f0-088f0cdb924d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2107.444188] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-44a248f6-443c-4b7c-95f0-088f0cdb924d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2107.444375] env[62875]: DEBUG nova.network.neutron [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2107.450751] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2107.451070] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2107.451525] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleting the datastore file [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2107.451724] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ff20e21-43cd-4ae3-9d57-5995c3b19fec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.459278] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){
[ 2107.459278] env[62875]: value = "task-2180204"
[ 2107.459278] env[62875]: _type = "Task"
[ 2107.459278] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2107.468321] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180204, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2107.484351] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de67fd4d-53a9-4cf6-8c1c-6f1adc59bcab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.494237] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bc0ae2-9afc-4a39-886a-b786d01207f4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.532224] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf723bd-2398-4d75-a5cf-f8d2dad38333 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.541240] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c777e4b2-1fed-40b4-b317-13f634a614e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2107.556395] env[62875]: DEBUG nova.compute.provider_tree [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2107.575039] env[62875]: DEBUG nova.network.neutron [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2107.775306] env[62875]: DEBUG nova.network.neutron [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Updating instance_info_cache with network_info: [{"id": "edc7469f-1104-497d-b8c6-1404fc3223ca", "address": "fa:16:3e:e3:c1:cc", "network": {"id": "f321d8c2-f141-4f69-968d-f4490ed505bc", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1180722911-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8c6003356bb4c6091c3eaa536e78032", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedc7469f-11", "ovs_interfaceid": "edc7469f-1104-497d-b8c6-1404fc3223ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2107.799334] env[62875]: DEBUG nova.compute.manager [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Received event network-vif-plugged-2a8a41b9-756a-4ad7-b2f3-3a05d58a308d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2107.799538] env[62875]: DEBUG oslo_concurrency.lockutils [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] Acquiring lock "44a248f6-443c-4b7c-95f0-088f0cdb924d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2107.799808] env[62875]: DEBUG oslo_concurrency.lockutils [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2107.800046] env[62875]: DEBUG oslo_concurrency.lockutils [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2107.800168] env[62875]: DEBUG nova.compute.manager [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] No waiting events found dispatching network-vif-plugged-2a8a41b9-756a-4ad7-b2f3-3a05d58a308d {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
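The instance_info_cache payload above is a serialized list of VIF dicts (what nova wraps as nova.network.model.NetworkInfo). A short sketch of walking that structure with plain dict access to list each VIF's fixed IPs, using a trimmed copy of the logged entry:

# Walk a network_info cache entry (trimmed from the record above) and
# print each VIF's id, MAC and fixed IP addresses.
network_info = [{
    "id": "edc7469f-1104-497d-b8c6-1404fc3223ca",
    "address": "fa:16:3e:e3:c1:cc",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.11",
                                      "type": "fixed"}]}]},
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            if ip["type"] == "fixed":
                print(vif["id"], vif["address"], ip["address"])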
[ 2107.800395] env[62875]: WARNING nova.compute.manager [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Received unexpected event network-vif-plugged-2a8a41b9-756a-4ad7-b2f3-3a05d58a308d for instance with vm_state building and task_state spawning.
[ 2107.800555] env[62875]: DEBUG nova.compute.manager [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Received event network-changed-2a8a41b9-756a-4ad7-b2f3-3a05d58a308d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2107.800709] env[62875]: DEBUG nova.compute.manager [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Refreshing instance network info cache due to event network-changed-2a8a41b9-756a-4ad7-b2f3-3a05d58a308d. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2107.800875] env[62875]: DEBUG oslo_concurrency.lockutils [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] Acquiring lock "refresh_cache-44a248f6-443c-4b7c-95f0-088f0cdb924d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2107.814227] env[62875]: DEBUG nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}}
[ 2107.824445] env[62875]: INFO nova.compute.manager [-] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Took 1.04 seconds to deallocate network for instance.
[ 2107.867473] env[62875]: INFO nova.compute.manager [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Took 34.76 seconds to build instance.
[ 2107.968540] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278707} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2107.968785] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2107.968971] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2107.969169] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2107.976848] env[62875]: DEBUG nova.network.neutron [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2108.059747] env[62875]: DEBUG nova.scheduler.client.report [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2108.126048] env[62875]: DEBUG nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
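In the inventory record above, usable capacity per resource class follows the placement convention (total - reserved) * allocation_ratio. Re-computing it from the logged numbers for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02:

# Placement capacity: (total - reserved) * allocation_ratio, using the
# inventory values from the record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0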
[ 2108.157094] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2108.157094] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2108.157094] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2108.157094] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2108.157094] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2108.159762] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2108.160075] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2108.160280] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2108.160495] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2108.160693] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2108.160898] env[62875]: DEBUG nova.virt.hardware [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2108.161917] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02256e16-516f-4e3d-84d1-1d55c996deff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.171842] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7b99f6-be2a-4992-8c50-966c440d2edd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.205135] env[62875]: DEBUG nova.network.neutron [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Updating instance_info_cache with network_info: [{"id": "2a8a41b9-756a-4ad7-b2f3-3a05d58a308d", "address": "fa:16:3e:d0:ca:ef", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a8a41b9-75", "ovs_interfaceid": "2a8a41b9-756a-4ad7-b2f3-3a05d58a308d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2108.280946] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Releasing lock "refresh_cache-e811f624-2dda-468c-ab28-9744c300eb1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
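The nova.virt.hardware records above walk flavor and image CPU-topology constraints (limits of 0:0:0 mean unconstrained, so the maxima default to 65536) and arrive at a single candidate for this 1-vCPU flavor. A toy re-derivation of that outcome, not nova's actual routine: enumerate the factorizations sockets * cores * threads == vcpus that fit within the limits:

# Toy CPU-topology enumeration (illustrative only, not nova's algorithm).
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]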
[ 2108.280946] env[62875]: DEBUG nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Instance network_info: |[{"id": "edc7469f-1104-497d-b8c6-1404fc3223ca", "address": "fa:16:3e:e3:c1:cc", "network": {"id": "f321d8c2-f141-4f69-968d-f4490ed505bc", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1180722911-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8c6003356bb4c6091c3eaa536e78032", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedc7469f-11", "ovs_interfaceid": "edc7469f-1104-497d-b8c6-1404fc3223ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2108.280946] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:c1:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2153f70-3d14-42ab-8bb3-be78296dd3b8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'edc7469f-1104-497d-b8c6-1404fc3223ca', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2108.288503] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Creating folder: Project (f8c6003356bb4c6091c3eaa536e78032). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2108.288503] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-527d4328-a2bb-4d9e-a89c-ba0516c5567a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.301442] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Created folder: Project (f8c6003356bb4c6091c3eaa536e78032) in parent group-v444854.
[ 2108.302578] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Creating folder: Instances. Parent ref: group-v444934. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2108.302578] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-219220d9-88ba-4595-bf69-35e45dfc33f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.312503] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Created folder: Instances in parent group-v444934.
[ 2108.312503] env[62875]: DEBUG oslo.service.loopingcall [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2108.312503] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2108.312503] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e0ee797-8af3-4632-b8e1-1cabb74ff618 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.333020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2108.339106] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2108.339106] env[62875]: value = "task-2180207"
[ 2108.339106] env[62875]: _type = "Task"
[ 2108.339106] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2108.347963] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180207, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2108.351794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2108.370220] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b9d840dd-16b2-4d1c-bcf9-13255edfa729 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 40.325s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2108.567920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2108.568524] env[62875]: DEBUG nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2108.571639] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.289s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2108.571868] env[62875]: DEBUG nova.objects.instance [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lazy-loading 'resources' on Instance uuid c217e435-c5d8-406b-99ee-ec71580fb344 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2108.707738] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-44a248f6-443c-4b7c-95f0-088f0cdb924d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2108.708183] env[62875]: DEBUG nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Instance network_info: |[{"id": "2a8a41b9-756a-4ad7-b2f3-3a05d58a308d", "address": "fa:16:3e:d0:ca:ef", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a8a41b9-75", "ovs_interfaceid": "2a8a41b9-756a-4ad7-b2f3-3a05d58a308d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2108.708740] env[62875]: DEBUG oslo_concurrency.lockutils [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] Acquired lock "refresh_cache-44a248f6-443c-4b7c-95f0-088f0cdb924d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2108.708995] env[62875]: DEBUG nova.network.neutron [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Refreshing network info cache for port 2a8a41b9-756a-4ad7-b2f3-3a05d58a308d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2108.711084] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:ca:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a8a41b9-756a-4ad7-b2f3-3a05d58a308d', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2108.720174] env[62875]: DEBUG oslo.service.loopingcall [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2108.721930] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2108.722220] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83b7415c-1509-4b6d-ae00-c6a6999d5a03 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.746233] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2108.746233] env[62875]: value = "task-2180208"
[ 2108.746233] env[62875]: _type = "Task"
[ 2108.746233] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2108.754899] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180208, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2108.850557] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180207, 'name': CreateVM_Task, 'duration_secs': 0.417264} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2108.850743] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 2108.851541] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2108.851748] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2108.852182] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2108.852477] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c580976-9bb6-479e-a2c4-93f07b7e69d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.863526] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){
[ 2108.863526] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5260c1a4-e5f1-e97e-2a82-f8eb6fa76702"
[ 2108.863526] env[62875]: _type = "Task"
[ 2108.863526] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2108.877297] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5260c1a4-e5f1-e97e-2a82-f8eb6fa76702, 'name': SearchDatastore_Task, 'duration_secs': 0.013381} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2108.877725] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2108.877861] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2108.878268] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2108.878268] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2108.878416] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2108.878739] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-847c6daf-d8a2-42dc-ab2b-a2c86c4fe73a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2108.887444] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2108.887741] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
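The image-cache records above operate on datastore paths of the form "[<datastore>] devstack-image-cache_base/<image-id>/<image-id>.vmdk". A hypothetical helper (not a nova function) that reproduces that layout for illustration:

# Hypothetical path builder matching the cache layout seen in the log.
def cached_image_path(datastore: str, image_id: str,
                      cache_dir: str = 'devstack-image-cache_base') -> str:
    return '[%s] %s/%s/%s.vmdk' % (datastore, cache_dir, image_id, image_id)

print(cached_image_path('datastore1', 'a9637bcc-4de8-4ea1-be59-4c697becf2a7'))
# -> [datastore1] devstack-image-cache_base/a9637bcc-.../a9637bcc-....vmdk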
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2108.888513] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0787508a-18eb-47f9-8567-64aa281caaa7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.894839] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2108.894839] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528f77b8-7d72-b093-9e5e-39645916c9f9" [ 2108.894839] env[62875]: _type = "Task" [ 2108.894839] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.903943] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528f77b8-7d72-b093-9e5e-39645916c9f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.000302] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2109.000576] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2109.000732] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2109.000929] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2109.001119] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 2109.001271] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2109.001478] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2109.001635] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2109.001800] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2109.001963] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2109.002148] env[62875]: DEBUG nova.virt.hardware [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2109.003347] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266d1ea4-cb0a-45ab-b010-058522bf5046 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.011359] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5df2941-e708-4dc1-97cc-4632b5fe1f2a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.027061] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2109.032742] env[62875]: DEBUG oslo.service.loopingcall [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
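Annotation: the topology entries above reduce to a small enumeration — list every (sockets, cores, threads) triple whose product equals the vCPU count and that fits the 65536-per-dimension limits, then sort by preference. A simplified sketch of that enumeration (not the exact nova.virt.hardware implementation); for the 1-vCPU m1.nano flavor it reproduces the single 1:1:1 result logged:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield sockets, cores, threads

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log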
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2109.033026] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2109.033419] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-953f6c47-b4af-4562-9bff-c6eabe5a9202 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.045869] env[62875]: DEBUG nova.network.neutron [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Successfully updated port: 3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2109.051742] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2109.051742] env[62875]: value = "task-2180209" [ 2109.051742] env[62875]: _type = "Task" [ 2109.051742] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.061186] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180209, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.077161] env[62875]: DEBUG nova.compute.utils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2109.081620] env[62875]: DEBUG nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2109.081846] env[62875]: DEBUG nova.network.neutron [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2109.090658] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52818304-5eae-923e-7603-a7b1e8adcb93/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2109.091480] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa88c1bf-c388-4097-9a47-0e41ac57dcd5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.097973] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52818304-5eae-923e-7603-a7b1e8adcb93/disk-0.vmdk is in state: ready. 
{{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2109.098186] env[62875]: ERROR oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52818304-5eae-923e-7603-a7b1e8adcb93/disk-0.vmdk due to incomplete transfer. [ 2109.098650] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-eaf7351a-e46a-4af4-a44a-1d8b77e750f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.109128] env[62875]: DEBUG oslo_vmware.rw_handles [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52818304-5eae-923e-7603-a7b1e8adcb93/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2109.109336] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Uploaded image f5485f99-7141-4502-914d-f249d8bdef3f to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2109.111668] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2109.111917] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-538b2f54-bdc4-4988-81eb-d5dd23837192 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.118086] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2109.118086] env[62875]: value = "task-2180210" [ 2109.118086] env[62875]: _type = "Task" [ 2109.118086] env[62875]: } to complete. 
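Annotation: the ERROR entry above is a deliberate cleanup path in oslo.vmware's VMDK read handle — on close, a transfer that consumed fewer bytes than the lease advertised aborts the NFC lease (the HttpNfcLeaseAbort call logged) instead of completing it. A hedged sketch of that close-time decision; the class and attribute names are illustrative, and only the abort call is taken from the log:

    class VmdkReadHandle:
        """Sketch of a read handle that must settle its NFC lease on close."""

        def __init__(self, lease, expected_bytes):
            self._lease = lease
            self._expected = expected_bytes
            self._consumed = 0  # advanced by read()

        def close(self):
            if self._consumed < self._expected:
                # Matches the log: incomplete transfer -> abort the lease.
                self._lease.abort()     # HttpNfcLeaseAbort in the log
            else:
                self._lease.complete()  # assumed happy-path counterpart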
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.124737] env[62875]: DEBUG nova.policy [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a616aeda10dd4092b13af60fda9163d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d453e43d9cf49f8838d738ee308bf22', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2109.132566] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180210, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.256526] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180208, 'name': CreateVM_Task, 'duration_secs': 0.450993} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.256709] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2109.257396] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2109.257643] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2109.257865] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2109.258132] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5bf89a4-ed6a-4160-9b8e-2fff9e037122 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.264754] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2109.264754] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ab5411-c7cf-7a68-57b5-2ab7d3bd20b1" [ 2109.264754] env[62875]: 
_type = "Task" [ 2109.264754] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.275413] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ab5411-c7cf-7a68-57b5-2ab7d3bd20b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.354421] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785eafa3-46f7-4045-a687-66a523c939ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.361960] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d38d2c2-c4a2-4af6-b03d-2f0cdb3dae23 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.365778] env[62875]: DEBUG oslo_concurrency.lockutils [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2109.366062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2109.366330] env[62875]: INFO nova.compute.manager [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Rebooting instance [ 2109.398236] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2969778-5316-4399-8cbb-984db75989dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.414334] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2048a25b-c5b1-4e64-9827-70562cb0dbea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.419063] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528f77b8-7d72-b093-9e5e-39645916c9f9, 'name': SearchDatastore_Task, 'duration_secs': 0.016044} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.420190] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a219adb-ddbe-4e73-b04b-9d1e0a42f478 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.429779] env[62875]: DEBUG nova.compute.provider_tree [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2109.435972] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2109.435972] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529964c8-4c71-3d0e-8ad8-45882598ff07" [ 2109.435972] env[62875]: _type = "Task" [ 2109.435972] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.444066] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529964c8-4c71-3d0e-8ad8-45882598ff07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.549105] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2109.549267] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquired lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2109.549421] env[62875]: DEBUG nova.network.neutron [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2109.562423] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180209, 'name': CreateVM_Task, 'duration_secs': 0.321796} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.562512] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2109.562934] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2109.582111] env[62875]: DEBUG nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2109.629870] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180210, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.675300] env[62875]: DEBUG nova.network.neutron [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Successfully created port: db4ba514-bcb8-4bac-a3dc-400ffde442b7 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2109.689945] env[62875]: DEBUG nova.network.neutron [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Updated VIF entry in instance network info cache for port 2a8a41b9-756a-4ad7-b2f3-3a05d58a308d. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2109.689945] env[62875]: DEBUG nova.network.neutron [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Updating instance_info_cache with network_info: [{"id": "2a8a41b9-756a-4ad7-b2f3-3a05d58a308d", "address": "fa:16:3e:d0:ca:ef", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a8a41b9-75", "ovs_interfaceid": "2a8a41b9-756a-4ad7-b2f3-3a05d58a308d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2109.787518] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ab5411-c7cf-7a68-57b5-2ab7d3bd20b1, 'name': SearchDatastore_Task, 'duration_secs': 0.014742} completed successfully. 
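Annotation: the instance_info_cache payload above is a list of VIF dicts; the fields Nova logs elsewhere (device name, MAC, fixed IPs) come out with a plain dictionary walk. A sketch over a trimmed copy of the exact entry shown:

    # Trimmed copy of the cache entry logged above.
    network_info = [{
        "id": "2a8a41b9-756a-4ad7-b2f3-3a05d58a308d",
        "address": "fa:16:3e:d0:ca:ef",
        "devname": "tap2a8a41b9-75",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.4"}]}]},
    }]

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["devname"], vif["address"], ips)
        # -> tap2a8a41b9-75 fa:16:3e:d0:ca:ef ['192.168.128.4']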
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.787518] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2109.787518] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2109.787518] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2109.787518] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2109.787518] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2109.787940] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-511eb902-57f4-4b7e-8f31-d99d6ef9f3ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.794864] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2109.794864] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526ecdb5-5e80-5117-f59e-67de217b62ad" [ 2109.794864] env[62875]: _type = "Task" [ 2109.794864] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.804321] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526ecdb5-5e80-5117-f59e-67de217b62ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.896794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2109.896945] env[62875]: DEBUG oslo_concurrency.lockutils [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquired lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2109.897234] env[62875]: DEBUG nova.network.neutron [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2109.901034] env[62875]: DEBUG nova.compute.manager [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Received event network-changed-55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2109.901339] env[62875]: DEBUG nova.compute.manager [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Refreshing instance network info cache due to event network-changed-55ce29a2-111a-4739-a7fc-ffa36ff6fa70. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2109.901615] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Acquiring lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2109.935075] env[62875]: DEBUG nova.scheduler.client.report [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2109.952542] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529964c8-4c71-3d0e-8ad8-45882598ff07, 'name': SearchDatastore_Task, 'duration_secs': 0.077825} completed successfully. 
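Annotation: the inventory record in the report-client entry above fully determines what Placement will accept for this provider — usable capacity per resource class is (total - reserved) * allocation_ratio. Checking the logged numbers:

    inventory = {  # values copied from the log entry above
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0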
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.952913] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2109.953262] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] e811f624-2dda-468c-ab28-9744c300eb1d/e811f624-2dda-468c-ab28-9744c300eb1d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2109.953747] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2109.953885] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2109.954161] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9937f74-cde1-4291-8c5a-97f0505e3303 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.957022] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c6df24b-361e-4c04-8c67-98ca25cc3ecb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.966253] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2109.966253] env[62875]: value = "task-2180211" [ 2109.966253] env[62875]: _type = "Task" [ 2109.966253] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.967832] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2109.968020] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Folder [datastore1] devstack-image-cache_base created. 
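Annotation: the CopyVirtualDisk_Task above goes from the shared image cache to a per-instance directory; both endpoints are plain "[datastore] path" strings. A tiny helper that rebuilds the two paths from the log, purely for illustration:

    def ds_path(datastore, *parts):
        """Build a '[datastore] a/b/c' style datastore path."""
        return f"[{datastore}] " + "/".join(parts)

    image = "a9637bcc-4de8-4ea1-be59-4c697becf2a7"
    instance = "e811f624-2dda-468c-ab28-9744c300eb1d"
    src = ds_path("datastore1", "devstack-image-cache_base", image, f"{image}.vmdk")
    dst = ds_path("datastore1", instance, f"{instance}.vmdk")
    # src and dst match the copy source and target logged above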
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2109.972360] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f176ff4d-4550-4aee-bd71-6cc9fe5caf05 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.982454] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.984096] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2109.984096] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5204f161-51b6-bd7c-8572-95081b068864" [ 2109.984096] env[62875]: _type = "Task" [ 2109.984096] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.992377] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5204f161-51b6-bd7c-8572-95081b068864, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.128755] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180210, 'name': Destroy_Task, 'duration_secs': 0.548561} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.129751] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Destroyed the VM [ 2110.130161] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2110.130504] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-53ef35e0-6858-4f78-bf44-03381961d52f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.137262] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2110.137262] env[62875]: value = "task-2180212" [ 2110.137262] env[62875]: _type = "Task" [ 2110.137262] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.145822] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180212, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.194295] env[62875]: DEBUG oslo_concurrency.lockutils [req-db5b2556-3843-4dbe-ab9e-d5fe3f788320 req-737bd068-dfd8-4c2f-9c73-7acbcd39e355 service nova] Releasing lock "refresh_cache-44a248f6-443c-4b7c-95f0-088f0cdb924d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2110.220218] env[62875]: DEBUG nova.network.neutron [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2110.306807] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526ecdb5-5e80-5117-f59e-67de217b62ad, 'name': SearchDatastore_Task, 'duration_secs': 0.026393} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.307363] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2110.307754] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2110.308477] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2110.444929] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.873s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.449748] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.828s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.451581] env[62875]: INFO nova.compute.claims [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2110.480942] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507195} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.482334] env[62875]: INFO nova.scheduler.client.report [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 tempest-ServerPasswordTestJSON-1762049314-project-member] Deleted allocations for instance c217e435-c5d8-406b-99ee-ec71580fb344 [ 2110.483217] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] e811f624-2dda-468c-ab28-9744c300eb1d/e811f624-2dda-468c-ab28-9744c300eb1d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2110.483480] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2110.483757] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f73ca2b5-2e82-4154-93b5-8f105a6a5fb7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.497396] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5204f161-51b6-bd7c-8572-95081b068864, 'name': SearchDatastore_Task, 'duration_secs': 0.01042} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.499143] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2110.499143] env[62875]: value = "task-2180213" [ 2110.499143] env[62875]: _type = "Task" [ 2110.499143] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.499392] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ca0e544-10d4-4d14-bb96-193bc14d96c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.509511] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2110.509511] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52946139-c27c-447a-7e4f-afc9925b671e" [ 2110.509511] env[62875]: _type = "Task" [ 2110.509511] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.512872] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180213, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.520093] env[62875]: DEBUG nova.network.neutron [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2110.528667] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52946139-c27c-447a-7e4f-afc9925b671e, 'name': SearchDatastore_Task, 'duration_secs': 0.009527} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.530896] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2110.531337] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2110.534581] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2110.534787] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2110.535075] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01396db2-0578-440a-bc28-e1454e5454ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.538368] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57e63340-6a92-44d5-a902-1a710f58e058 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.549327] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2110.549327] env[62875]: value = "task-2180214" [ 2110.549327] env[62875]: _type = "Task" [ 2110.549327] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.549539] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2110.550379] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2110.551266] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2b66a97-3930-40ce-a919-a331b7ac4a65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.564165] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.564492] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2110.564492] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5206dc04-bbf7-e01c-f147-c036559964cb" [ 2110.564492] env[62875]: _type = "Task" [ 2110.564492] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.574448] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5206dc04-bbf7-e01c-f147-c036559964cb, 'name': SearchDatastore_Task, 'duration_secs': 0.009897} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.575285] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e9b13a-89c1-4a40-a1f9-454ed2a74293 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.583165] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2110.583165] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e66ea0-be46-cef4-3261-95d96e9400e2" [ 2110.583165] env[62875]: _type = "Task" [ 2110.583165] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.591515] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e66ea0-be46-cef4-3261-95d96e9400e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.598337] env[62875]: DEBUG nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2110.623532] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2110.623757] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2110.623913] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2110.624104] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2110.624251] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2110.624393] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2110.624597] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2110.624756] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2110.624920] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a 
tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2110.625090] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2110.625268] env[62875]: DEBUG nova.virt.hardware [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2110.626454] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b8dc78-3126-4ff8-88d0-86bc54269779 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.634880] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7c4cf6-71df-4f17-b01e-0d5561df0c42 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.655748] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180212, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.700911] env[62875]: DEBUG nova.network.neutron [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updating instance_info_cache with network_info: [{"id": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "address": "fa:16:3e:94:22:ba", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ce29a2-11", "ovs_interfaceid": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2110.996170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2cb9e88e-c30f-4e79-9c58-42f2050c453e tempest-ServerPasswordTestJSON-1762049314 
tempest-ServerPasswordTestJSON-1762049314-project-member] Lock "c217e435-c5d8-406b-99ee-ec71580fb344" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.247s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.011580] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180213, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066694} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.011784] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2111.013088] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4ad9c1-88a5-45cf-9c3f-d25785f440f7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.028139] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Releasing lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2111.028476] env[62875]: DEBUG nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Instance network_info: |[{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2111.037782] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] 
Reconfiguring VM instance instance-0000003e to attach disk [datastore1] e811f624-2dda-468c-ab28-9744c300eb1d/e811f624-2dda-468c-ab28-9744c300eb1d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2111.038218] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:e1:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4e02e98f-44ce-42b7-a3ac-4034fae5d127', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e6fd20c-b2c8-44ea-947f-cf7af45bc529', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2111.045574] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Creating folder: Project (82e42c29c6dd480c87096bea1977074d). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2111.046105] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a7e6c36-b901-4ffc-ade5-003e0bf04d61 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.063625] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "409b6902-f9ef-469b-a9db-4e93f764d199" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.063868] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "409b6902-f9ef-469b-a9db-4e93f764d199" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.064088] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "409b6902-f9ef-469b-a9db-4e93f764d199-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.064351] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "409b6902-f9ef-469b-a9db-4e93f764d199-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.064580] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 
tempest-ImagesTestJSON-2014349889-project-member] Lock "409b6902-f9ef-469b-a9db-4e93f764d199-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.066486] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69e5cd3c-a96d-4b8d-be92-6f905b229f35 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.069146] env[62875]: INFO nova.compute.manager [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Terminating instance [ 2111.081578] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437364} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.082579] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2111.082901] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2111.083376] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2111.083376] env[62875]: value = "task-2180215" [ 2111.083376] env[62875]: _type = "Task" [ 2111.083376] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.084426] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13193a2f-5e7e-484e-bf5e-9185fa7aefad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.092586] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Created folder: Project (82e42c29c6dd480c87096bea1977074d) in parent group-v444854. [ 2111.092830] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Creating folder: Instances. Parent ref: group-v444939. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2111.093844] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4e61c11-f19e-418b-8adb-188e46f17b9b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.099800] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2111.099800] env[62875]: value = "task-2180217" [ 2111.099800] env[62875]: _type = "Task" [ 2111.099800] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.110232] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e66ea0-be46-cef4-3261-95d96e9400e2, 'name': SearchDatastore_Task, 'duration_secs': 0.008756} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.110523] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180215, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.112677] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2111.112952] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2111.113899] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81ddb987-b72c-484f-8593-8a110de9d825 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.118890] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180217, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.122992] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Created folder: Instances in parent group-v444939. 
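The Folder.CreateFolder and wait_for_task exchanges above follow the generic oslo.vmware calling pattern rather than anything specific to this test run. A minimal sketch of that pattern, assuming a reachable vCenter with placeholder endpoint and credentials (the host name, user, and password below are illustrative, and this is a sketch of the library usage, not Nova's actual vm_util code):

from oslo_vmware import api

# Placeholder endpoint and credentials; a real Nova deployment reads
# these from the [vmware] section of nova.conf. Constructing the
# session logs in immediately, matching the SessionManager.Login and
# "Successfully established new session" entries at the top of this log.
session = api.VMwareAPISession('vc1.example.test', 'user', 'secret',
                               api_retry_count=10,
                               task_poll_interval=0.5)

# Folder.CreateFolder is a synchronous vSphere call: it returns the new
# folder's managed object reference directly, with no task identifier.
# That is why the log shows an INFO "Created folder" line with no
# accompanying task-... polling.
root_folder = session.vim.service_content.rootFolder
instances_folder = session.invoke_api(session.vim, 'CreateFolder',
                                      root_folder, name='Instances')

# By contrast, the *_Task methods (CopyVirtualDisk_Task,
# ExtendVirtualDisk_Task, CreateVM_Task, ReconfigVM_Task, ...) return a
# Task reference instead of a result. session.wait_for_task() drives
# the _poll_task loop that produces the "progress is N%" /
# "completed successfully" entries seen throughout this log, and raises
# if the task finishes in an error state, e.g.:
#   task = session.invoke_api(session.vim, 'CreateVM_Task', folder, ...)
#   task_info = session.wait_for_task(task)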
[ 2111.123330] env[62875]: DEBUG oslo.service.loopingcall [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2111.124477] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2111.124809] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2111.124809] env[62875]: value = "task-2180219" [ 2111.124809] env[62875]: _type = "Task" [ 2111.124809] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.124991] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3366ae1-020b-4586-8b49-c0fc2d1842a9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.150825] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.154681] env[62875]: DEBUG oslo_vmware.api [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180212, 'name': RemoveSnapshot_Task, 'duration_secs': 0.643592} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.154873] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2111.154873] env[62875]: value = "task-2180220" [ 2111.154873] env[62875]: _type = "Task" [ 2111.154873] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.155147] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2111.155378] env[62875]: INFO nova.compute.manager [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Took 16.97 seconds to snapshot the instance on the hypervisor. [ 2111.165933] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180220, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.204347] env[62875]: DEBUG oslo_concurrency.lockutils [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Releasing lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2111.206193] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Acquired lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2111.206308] env[62875]: DEBUG nova.network.neutron [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Refreshing network info cache for port 55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2111.577343] env[62875]: DEBUG nova.compute.manager [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2111.577723] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2111.578567] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fde3d7-ba6f-42e1-9998-d1391b551ce4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.587622] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2111.593401] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3419d48-e087-469b-828b-37b4edc82a6e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.600593] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180215, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.613175] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066306} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.613869] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2111.614251] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2e20d8-55bd-4cc5-95b9-022951cab481 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.624509] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "346f4371-3029-4710-9163-08cf36196207" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.624791] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "346f4371-3029-4710-9163-08cf36196207" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.645512] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2111.646107] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca77314c-cd7c-43e2-a7ad-5048b7da5253 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.669054] env[62875]: DEBUG nova.compute.manager [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Instance disappeared during snapshot {{(pid=62875) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 2111.684075] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488805} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.689057] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2111.689355] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2111.689856] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180220, 'name': CreateVM_Task, 'duration_secs': 0.401298} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.690147] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2111.690147] env[62875]: value = "task-2180222" [ 2111.690147] env[62875]: _type = "Task" [ 2111.690147] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.690376] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2111.690574] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2111.690738] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleting the datastore file [datastore2] 409b6902-f9ef-469b-a9db-4e93f764d199 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2111.691799] env[62875]: DEBUG nova.compute.manager [None req-45e42ffb-b5c5-4bc5-a67e-89534357ef8e tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image not found during clean up f5485f99-7141-4502-914d-f249d8bdef3f {{(pid=62875) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 2111.695039] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b61b4a6-f699-4618-9930-f78d39059613 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.696963] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Created VM on the ESX host {{(pid=62875) 
create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2111.697257] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d405fd89-b0ae-4a21-80ee-d4fb5012e5dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.700835] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2111.701072] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2111.701455] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2111.705355] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd4a1b06-0997-419d-a9b9-340404ae30ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.716877] env[62875]: DEBUG nova.compute.manager [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2111.718970] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2111.718970] env[62875]: value = "task-2180223" [ 2111.718970] env[62875]: _type = "Task" [ 2111.718970] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.719187] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180222, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.719935] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265db7df-685c-451c-aae1-17caec4dc6fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.723034] env[62875]: DEBUG oslo_vmware.api [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2111.723034] env[62875]: value = "task-2180224" [ 2111.723034] env[62875]: _type = "Task" [ 2111.723034] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.727632] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2111.727632] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cad418-7225-404d-cc2d-1023258fcbb5" [ 2111.727632] env[62875]: _type = "Task" [ 2111.727632] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.749105] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180223, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.755622] env[62875]: DEBUG oslo_vmware.api [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.755874] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cad418-7225-404d-cc2d-1023258fcbb5, 'name': SearchDatastore_Task, 'duration_secs': 0.022102} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.756170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2111.756462] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2111.756662] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2111.757281] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2111.757281] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2111.760101] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3767be85-6610-440f-9f9d-6f62bde21ac8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.773215] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2111.773215] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2111.774020] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85459614-0fe4-467a-81c5-03a1ed952f6c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.783494] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2111.783494] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5207b3c1-c37f-72bd-d634-50042312e848" [ 2111.783494] env[62875]: _type = "Task" [ 2111.783494] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.793078] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5207b3c1-c37f-72bd-d634-50042312e848, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.835229] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8da612-ca3e-451f-9e0e-ac651074d01b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.845508] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6403e9e-4614-4044-8048-ec612f4d5d3f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.881148] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deb072a-bd93-411b-bd96-429b8bba94d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.889190] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd972cb-60fd-4eab-816f-057d899a8d32 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.903829] env[62875]: DEBUG nova.compute.provider_tree [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2111.931566] env[62875]: DEBUG nova.compute.manager [req-addde0ff-f5db-4221-afe2-1ed4184f2c2c req-7d057362-263c-4bb0-97a0-e88ff35ae1a9 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Received event network-vif-plugged-db4ba514-bcb8-4bac-a3dc-400ffde442b7 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2111.931840] env[62875]: DEBUG oslo_concurrency.lockutils [req-addde0ff-f5db-4221-afe2-1ed4184f2c2c req-7d057362-263c-4bb0-97a0-e88ff35ae1a9 service nova] Acquiring lock "7f16b893-02e4-4395-b787-f82bc4549e4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.932033] env[62875]: DEBUG oslo_concurrency.lockutils [req-addde0ff-f5db-4221-afe2-1ed4184f2c2c req-7d057362-263c-4bb0-97a0-e88ff35ae1a9 service nova] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.932193] env[62875]: DEBUG oslo_concurrency.lockutils [req-addde0ff-f5db-4221-afe2-1ed4184f2c2c req-7d057362-263c-4bb0-97a0-e88ff35ae1a9 service nova] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.932361] env[62875]: DEBUG nova.compute.manager [req-addde0ff-f5db-4221-afe2-1ed4184f2c2c req-7d057362-263c-4bb0-97a0-e88ff35ae1a9 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] No waiting events found dispatching network-vif-plugged-db4ba514-bcb8-4bac-a3dc-400ffde442b7 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2111.932577] env[62875]: WARNING nova.compute.manager [req-addde0ff-f5db-4221-afe2-1ed4184f2c2c req-7d057362-263c-4bb0-97a0-e88ff35ae1a9 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Received unexpected event network-vif-plugged-db4ba514-bcb8-4bac-a3dc-400ffde442b7 for instance with vm_state building and task_state spawning. [ 2112.005922] env[62875]: DEBUG nova.network.neutron [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updated VIF entry in instance network info cache for port 55ce29a2-111a-4739-a7fc-ffa36ff6fa70. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2112.006401] env[62875]: DEBUG nova.network.neutron [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updating instance_info_cache with network_info: [{"id": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "address": "fa:16:3e:94:22:ba", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ce29a2-11", "ovs_interfaceid": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2112.052827] env[62875]: DEBUG nova.network.neutron [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Successfully updated port: db4ba514-bcb8-4bac-a3dc-400ffde442b7 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2112.097179] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180215, 'name': ReconfigVM_Task, 'duration_secs': 0.923916} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.097475] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Reconfigured VM instance instance-0000003e to attach disk [datastore1] e811f624-2dda-468c-ab28-9744c300eb1d/e811f624-2dda-468c-ab28-9744c300eb1d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2112.098099] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0227c220-db00-4dba-9735-0c2b495eb590 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.104906] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2112.104906] env[62875]: value = "task-2180225" [ 2112.104906] env[62875]: _type = "Task" [ 2112.104906] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.115908] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180225, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.150015] env[62875]: DEBUG nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2112.211475] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180222, 'name': ReconfigVM_Task, 'duration_secs': 0.370284} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.211958] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2112.212731] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-add0f394-ea38-4de2-880f-973943d2bde3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.222052] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2112.222052] env[62875]: value = "task-2180226" [ 2112.222052] env[62875]: _type = "Task" [ 2112.222052] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.232018] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180226, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.239990] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180223, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067009} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.244057] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2112.244528] env[62875]: DEBUG oslo_vmware.api [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249815} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.245382] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8fbb89-7f01-47b9-a081-8db0edcc4341 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.248632] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2112.249233] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2112.249629] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2112.249898] env[62875]: INFO nova.compute.manager [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Took 0.67 seconds to destroy the instance on the hypervisor. [ 2112.250243] env[62875]: DEBUG oslo.service.loopingcall [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2112.254050] env[62875]: DEBUG nova.compute.manager [-] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2112.254431] env[62875]: DEBUG nova.network.neutron [-] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2112.278345] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2112.278595] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d15f249a-07f3-40a1-859b-4593743a9b4f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.310226] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5207b3c1-c37f-72bd-d634-50042312e848, 'name': SearchDatastore_Task, 'duration_secs': 0.033394} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.312698] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2112.312698] env[62875]: value = "task-2180227" [ 2112.312698] env[62875]: _type = "Task" [ 2112.312698] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.312698] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38e60688-12a1-4aa4-9e55-2c91c6268972 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.321056] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2112.321056] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b54da-e611-d796-c746-b4aa0758ebef" [ 2112.321056] env[62875]: _type = "Task" [ 2112.321056] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.324246] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180227, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.334450] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b54da-e611-d796-c746-b4aa0758ebef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.409163] env[62875]: DEBUG nova.scheduler.client.report [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2112.509714] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Releasing lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2112.510177] env[62875]: DEBUG nova.compute.manager [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received event network-vif-plugged-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2112.510443] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Acquiring lock "8f817564-b224-4dcb-bd8c-4d63509a5628-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.510667] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.510834] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.511009] env[62875]: DEBUG nova.compute.manager [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] No waiting events found dispatching network-vif-plugged-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 2112.511184] env[62875]: WARNING nova.compute.manager [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received unexpected event network-vif-plugged-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 for instance with vm_state building and task_state spawning. [ 2112.511345] env[62875]: DEBUG nova.compute.manager [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2112.511501] env[62875]: DEBUG nova.compute.manager [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing instance network info cache due to event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2112.511682] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Acquiring lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2112.511815] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Acquired lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.511968] env[62875]: DEBUG nova.network.neutron [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2112.556616] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "refresh_cache-7f16b893-02e4-4395-b787-f82bc4549e4a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2112.556994] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquired lock "refresh_cache-7f16b893-02e4-4395-b787-f82bc4549e4a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.556994] env[62875]: DEBUG nova.network.neutron [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2112.617100] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180225, 'name': Rename_Task, 'duration_secs': 0.403743} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.617513] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2112.617879] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f3ff93c-53ea-41e2-a8c9-c8bdc622c5ae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.627021] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2112.627021] env[62875]: value = "task-2180228" [ 2112.627021] env[62875]: _type = "Task" [ 2112.627021] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.635589] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180228, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.730009] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180226, 'name': Rename_Task, 'duration_secs': 0.152021} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.730307] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2112.730575] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-319a7096-0e4d-4a24-83a0-9669306b68de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.737503] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2112.737503] env[62875]: value = "task-2180229" [ 2112.737503] env[62875]: _type = "Task" [ 2112.737503] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.746404] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180229, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.756263] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630d55dd-a59a-41f7-ad57-da59c7c87aa1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.763176] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Doing hard reboot of VM {{(pid=62875) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 2112.763422] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b3740fbd-79f4-46b2-9f0e-4903a50265fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.768179] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.769669] env[62875]: DEBUG oslo_vmware.api [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2112.769669] env[62875]: value = "task-2180230" [ 2112.769669] env[62875]: _type = "Task" [ 2112.769669] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.778789] env[62875]: DEBUG oslo_vmware.api [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180230, 'name': ResetVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.823555] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180227, 'name': ReconfigVM_Task, 'duration_secs': 0.260959} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.824280] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238/a19f5bee-ece8-4aa3-8c33-9474da385238.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2112.824652] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90009b1c-48b0-4a2f-a34f-bbb6af12ba51 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.834885] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b54da-e611-d796-c746-b4aa0758ebef, 'name': SearchDatastore_Task, 'duration_secs': 0.013094} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.836375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2112.836581] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/8f817564-b224-4dcb-bd8c-4d63509a5628.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2112.836946] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2112.836946] env[62875]: value = "task-2180231" [ 2112.836946] env[62875]: _type = "Task" [ 2112.836946] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.837249] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2f679b4-c235-4e0f-bcc7-69ae408bcb8a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.848082] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180231, 'name': Rename_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.849843] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2112.849843] env[62875]: value = "task-2180232" [ 2112.849843] env[62875]: _type = "Task" [ 2112.849843] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.858620] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180232, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.914311] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.914847] env[62875]: DEBUG nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2112.917700] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.630s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.919323] env[62875]: INFO nova.compute.claims [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2113.092413] env[62875]: DEBUG nova.network.neutron [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2113.134425] env[62875]: DEBUG nova.network.neutron [-] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.145368] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180228, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.252979] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180229, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.280112] env[62875]: DEBUG oslo_vmware.api [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180230, 'name': ResetVM_Task, 'duration_secs': 0.08646} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.281077] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Did hard reboot of VM {{(pid=62875) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 2113.281077] env[62875]: DEBUG nova.compute.manager [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2113.285290] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2cb0d0-6446-4538-a22b-cb9b40f55bff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.350091] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180231, 'name': Rename_Task, 'duration_secs': 0.177165} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.351164] env[62875]: DEBUG nova.network.neutron [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Updating instance_info_cache with network_info: [{"id": "db4ba514-bcb8-4bac-a3dc-400ffde442b7", "address": "fa:16:3e:33:fe:4f", "network": {"id": "aeccbf75-8666-4d39-b693-43d06fb910bf", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1097242513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d453e43d9cf49f8838d738ee308bf22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb4ba514-bc", "ovs_interfaceid": "db4ba514-bcb8-4bac-a3dc-400ffde442b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.352423] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2113.356406] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07bd6be7-dba5-4aca-9606-18c4d324735d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.364346] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180232, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510824} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.366929] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/8f817564-b224-4dcb-bd8c-4d63509a5628.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2113.366929] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2113.366929] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2113.366929] env[62875]: value = "task-2180233" [ 2113.366929] env[62875]: _type = "Task" [ 2113.366929] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.366929] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9650cc5-4baf-4058-8130-653455136ae7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.379594] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180233, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.381792] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2113.381792] env[62875]: value = "task-2180234" [ 2113.381792] env[62875]: _type = "Task" [ 2113.381792] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.384668] env[62875]: DEBUG nova.network.neutron [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updated VIF entry in instance network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2113.384998] env[62875]: DEBUG nova.network.neutron [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.391286] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180234, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.424245] env[62875]: DEBUG nova.compute.utils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2113.425871] env[62875]: DEBUG nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2113.425871] env[62875]: DEBUG nova.network.neutron [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2113.506463] env[62875]: DEBUG nova.policy [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52db0a44319f46939b47247136267ceb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5062c761ea34842a2f6179ae76f3465', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2113.637649] env[62875]: INFO nova.compute.manager [-] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Took 1.38 seconds to deallocate network for instance. [ 2113.638559] env[62875]: DEBUG oslo_vmware.api [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180228, 'name': PowerOnVM_Task, 'duration_secs': 0.634322} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.639839] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2113.640070] env[62875]: INFO nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Took 10.48 seconds to spawn the instance on the hypervisor. [ 2113.640279] env[62875]: DEBUG nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2113.641218] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6c4dd8-2287-490d-b3fb-e3be4f70a3a5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.747612] env[62875]: DEBUG oslo_vmware.api [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180229, 'name': PowerOnVM_Task, 'duration_secs': 0.719309} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.747858] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2113.748072] env[62875]: INFO nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Took 8.10 seconds to spawn the instance on the hypervisor. [ 2113.748274] env[62875]: DEBUG nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2113.749126] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635cdff6-133d-44cc-ab9a-5b82c3e46c96 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.803353] env[62875]: DEBUG oslo_concurrency.lockutils [None req-039283f4-e037-4625-a1ca-c6181ac18834 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" "released" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: held 4.437s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.859803] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Releasing lock "refresh_cache-7f16b893-02e4-4395-b787-f82bc4549e4a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2113.862436] env[62875]: DEBUG nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Instance network_info: |[{"id": "db4ba514-bcb8-4bac-a3dc-400ffde442b7", "address": "fa:16:3e:33:fe:4f", "network": {"id": "aeccbf75-8666-4d39-b693-43d06fb910bf", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1097242513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d453e43d9cf49f8838d738ee308bf22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb4ba514-bc", "ovs_interfaceid": "db4ba514-bcb8-4bac-a3dc-400ffde442b7", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2113.862436] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:fe:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db4ba514-bcb8-4bac-a3dc-400ffde442b7', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2113.868350] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Creating folder: Project (4d453e43d9cf49f8838d738ee308bf22). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2113.869143] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b72df971-a484-4f58-9785-1f94bae9ae25 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.881296] env[62875]: DEBUG oslo_vmware.api [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180233, 'name': PowerOnVM_Task, 'duration_secs': 0.439774} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.882636] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2113.882851] env[62875]: DEBUG nova.compute.manager [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2113.883132] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Created folder: Project (4d453e43d9cf49f8838d738ee308bf22) in parent group-v444854. [ 2113.883299] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Creating folder: Instances. Parent ref: group-v444942. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2113.886569] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43533b11-8dfe-4b17-819f-e7d246fefe1d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.890811] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-154eab8d-871d-442c-b57f-ef57b70e9338 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.890811] env[62875]: DEBUG oslo_concurrency.lockutils [req-42f3b4b8-cb9f-4427-984f-68d5593f2ed8 req-ac8020e9-bc21-458f-8db0-2c61fd0c0753 service nova] Releasing lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2113.895750] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180234, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082025} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.897271] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2113.901376] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6624ea77-4c48-4c48-b95c-d3982863aa4c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.903785] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Created folder: Instances in parent group-v444942. [ 2113.904010] env[62875]: DEBUG oslo.service.loopingcall [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2113.904833] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2113.905328] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a24f6ea-8b8e-4c19-bcfe-f6922dcfd7e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.937250] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/8f817564-b224-4dcb-bd8c-4d63509a5628.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2113.938997] env[62875]: DEBUG nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2113.944051] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de218c60-0d99-44fc-8bb7-07dc671500f3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.959209] env[62875]: DEBUG nova.network.neutron [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Successfully created port: dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2113.963157] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2113.963157] env[62875]: value = "task-2180237" [ 2113.963157] env[62875]: _type = "Task" [ 2113.963157] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.967172] env[62875]: DEBUG nova.compute.manager [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Received event network-changed-db4ba514-bcb8-4bac-a3dc-400ffde442b7 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2113.967172] env[62875]: DEBUG nova.compute.manager [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Refreshing instance network info cache due to event network-changed-db4ba514-bcb8-4bac-a3dc-400ffde442b7. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2113.967172] env[62875]: DEBUG oslo_concurrency.lockutils [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] Acquiring lock "refresh_cache-7f16b893-02e4-4395-b787-f82bc4549e4a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2113.967172] env[62875]: DEBUG oslo_concurrency.lockutils [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] Acquired lock "refresh_cache-7f16b893-02e4-4395-b787-f82bc4549e4a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2113.967172] env[62875]: DEBUG nova.network.neutron [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Refreshing network info cache for port db4ba514-bcb8-4bac-a3dc-400ffde442b7 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2113.973361] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2113.973361] env[62875]: value = "task-2180238" [ 2113.973361] env[62875]: _type = "Task" [ 2113.973361] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.981310] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180237, 'name': CreateVM_Task} progress is 15%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.987219] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180238, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.149080] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.166618] env[62875]: INFO nova.compute.manager [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Took 35.67 seconds to build instance. [ 2114.270103] env[62875]: INFO nova.compute.manager [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Took 34.84 seconds to build instance. 
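The records in this section are dominated by oslo.vmware's task polling: wait_for_task (api.py:397) hands the task to a loopingcall, _poll_task logs the intermediate "progress is N%." lines from api.py:434, and api.py:444 logs "completed successfully." once the task reaches a terminal state. Below is a minimal sketch of that poll-until-terminal loop, assuming a hypothetical fetch_state() callable in place of the real suds/PropertyCollector plumbing; it illustrates the pattern, it is not oslo.vmware's implementation.

import time

POLL_INTERVAL = 0.5  # assumed interval; oslo.vmware drives this from a loopingcall timer


def wait_for_task(fetch_state, timeout=120.0):
    """Poll a vCenter-style task until it reaches a terminal state.

    fetch_state is a hypothetical callable standing in for the session
    call that reads the task's TaskInfo; it returns a dict shaped like
    the log snippets: {'state': ..., 'progress': ...}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_state()
        if info['state'] == 'success':
            return info  # surfaces as "... completed successfully."
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # 'queued'/'running' surfaces as "Task: {...} progress is N%."
        print("progress is %d%%" % info.get('progress', 0))
        time.sleep(POLL_INTERVAL)
    raise TimeoutError('task still running after %.0fs' % timeout)


# Canned state sequence mimicking task-2180228 (PowerOnVM_Task) above:
states = iter([
    {'state': 'running', 'progress': 0},
    {'state': 'running', 'progress': 89},
    {'state': 'success', 'progress': 100},
])
print(wait_for_task(lambda: next(states)))

Run against the canned sequence, this prints the 0% and 89% progress lines and then the final info dict, mirroring the PowerOnVM_Task trace for instance e811f624-2dda-468c-ab28-9744c300eb1d.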
[ 2114.334255] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf30ef9-6d6c-4e7a-b76a-63d883cf9b63 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.343377] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0087c4a-543e-456b-8066-244ba987bebe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.378312] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea249e05-3765-41ed-a885-0468a864b813 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.386517] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41011b13-540e-4b2d-aa23-9cdfbe429174 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.400904] env[62875]: DEBUG nova.compute.provider_tree [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2114.414441] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.477611] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180237, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.487927] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180238, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.668736] env[62875]: DEBUG oslo_concurrency.lockutils [None req-230507e0-a986-4c3b-b972-5aebc94fe1d5 tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "e811f624-2dda-468c-ab28-9744c300eb1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 37.189s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.772858] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2caa320b-bc21-4779-8c96-a90bd676a368 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 36.347s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.781943] env[62875]: DEBUG nova.network.neutron [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Updated VIF entry in instance network info cache for port db4ba514-bcb8-4bac-a3dc-400ffde442b7. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2114.782581] env[62875]: DEBUG nova.network.neutron [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Updating instance_info_cache with network_info: [{"id": "db4ba514-bcb8-4bac-a3dc-400ffde442b7", "address": "fa:16:3e:33:fe:4f", "network": {"id": "aeccbf75-8666-4d39-b693-43d06fb910bf", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1097242513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d453e43d9cf49f8838d738ee308bf22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb4ba514-bc", "ovs_interfaceid": "db4ba514-bcb8-4bac-a3dc-400ffde442b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.905534] env[62875]: DEBUG nova.scheduler.client.report [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2114.970965] env[62875]: DEBUG nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2114.980534] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180237, 'name': CreateVM_Task, 'duration_secs': 0.687923} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.984931] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2114.986243] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.986551] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.986975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2114.987771] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0336802b-1c33-47d9-97c6-13720dfac140 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.993747] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180238, 'name': ReconfigVM_Task, 'duration_secs': 0.768257} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.997019] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/8f817564-b224-4dcb-bd8c-4d63509a5628.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2114.997019] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5503d8bd-92ac-4e82-96c9-597a4549f1b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.998846] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2114.998846] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529bcad2-228c-328d-fc48-4ba4778aa153" [ 2114.998846] env[62875]: _type = "Task" [ 2114.998846] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.010502] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2115.010502] env[62875]: value = "task-2180239" [ 2115.010502] env[62875]: _type = "Task" [ 2115.010502] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.018478] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2115.018769] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2115.018987] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2115.019238] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2115.019427] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2115.019604] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2115.019864] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2115.020059] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2115.020264] env[62875]: DEBUG 
nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2115.020501] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2115.020718] env[62875]: DEBUG nova.virt.hardware [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2115.021811] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf79a57-4f55-4dbf-ae0a-13009a98a2af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.031559] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529bcad2-228c-328d-fc48-4ba4778aa153, 'name': SearchDatastore_Task, 'duration_secs': 0.012681} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.032454] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.032761] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2115.033118] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.033333] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.033559] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 
tempest-ServerTagsTestJSON-718510195-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2115.040289] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26b77b8d-d149-459e-9650-1e1573cdd6bd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.044553] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180239, 'name': Rename_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.047624] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a07c4d2-a2ca-4a48-8187-1724629fe7a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.063972] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2115.064171] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2115.064923] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-439247e1-151b-4b08-83df-e19dc76df0ff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.070714] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2115.070714] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52537e5f-5a43-970b-88cb-6cce0e4ebe45" [ 2115.070714] env[62875]: _type = "Task" [ 2115.070714] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.079780] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52537e5f-5a43-970b-88cb-6cce0e4ebe45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.291150] env[62875]: DEBUG oslo_concurrency.lockutils [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] Releasing lock "refresh_cache-7f16b893-02e4-4395-b787-f82bc4549e4a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.291150] env[62875]: DEBUG nova.compute.manager [req-6392c44a-e733-4829-b31e-dd070649fcfe req-0221e814-6ac7-45a7-a3f6-7820e94b3385 service nova] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Received event network-vif-deleted-a7a79e94-603f-457d-a72b-08c0228a924b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2115.410294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.411084] env[62875]: DEBUG nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2115.414969] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.082s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.415320] env[62875]: DEBUG nova.objects.instance [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lazy-loading 'resources' on Instance uuid 1230e54c-701a-4406-95bd-14e32914bc8d {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2115.521676] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180239, 'name': Rename_Task, 'duration_secs': 0.252423} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.521885] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2115.522136] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5c6ae4c-b30d-4c66-9f68-1e6a79adb97c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.528738] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2115.528738] env[62875]: value = "task-2180240" [ 2115.528738] env[62875]: _type = "Task" [ 2115.528738] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.536936] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180240, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.581480] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52537e5f-5a43-970b-88cb-6cce0e4ebe45, 'name': SearchDatastore_Task, 'duration_secs': 0.011273} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.582310] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52eb3dc0-c36e-4830-838d-c890711866ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.587568] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2115.587568] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bd7ebb-a627-34e6-3661-5a24dc914af0" [ 2115.587568] env[62875]: _type = "Task" [ 2115.587568] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.595383] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bd7ebb-a627-34e6-3661-5a24dc914af0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.830711] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "e811f624-2dda-468c-ab28-9744c300eb1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.833161] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "e811f624-2dda-468c-ab28-9744c300eb1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.833161] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "e811f624-2dda-468c-ab28-9744c300eb1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.833161] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "e811f624-2dda-468c-ab28-9744c300eb1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.833161] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "e811f624-2dda-468c-ab28-9744c300eb1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.843548] env[62875]: INFO nova.compute.manager [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Terminating instance [ 2115.918377] env[62875]: DEBUG nova.compute.utils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2115.925378] env[62875]: DEBUG nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2115.930811] env[62875]: DEBUG nova.network.neutron [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2115.957192] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "a19f5bee-ece8-4aa3-8c33-9474da385238" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.957573] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "a19f5bee-ece8-4aa3-8c33-9474da385238" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.958975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "a19f5bee-ece8-4aa3-8c33-9474da385238-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.959311] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "a19f5bee-ece8-4aa3-8c33-9474da385238-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.959522] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "a19f5bee-ece8-4aa3-8c33-9474da385238-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.962592] env[62875]: INFO nova.compute.manager [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Terminating instance [ 2115.984278] env[62875]: DEBUG nova.policy [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d0e175791341aea0db00ef8a1b5680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '226340868e7446cca12688a32d13c630', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2115.988546] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.988802] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.989008] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.989677] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.989890] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.992982] env[62875]: INFO nova.compute.manager [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Terminating instance [ 2115.996356] env[62875]: DEBUG nova.compute.manager [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Received event network-changed-55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2115.996356] env[62875]: DEBUG nova.compute.manager [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Refreshing instance network info cache due to event network-changed-55ce29a2-111a-4739-a7fc-ffa36ff6fa70. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2115.996696] env[62875]: DEBUG oslo_concurrency.lockutils [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] Acquiring lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.996815] env[62875]: DEBUG oslo_concurrency.lockutils [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] Acquired lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.996982] env[62875]: DEBUG nova.network.neutron [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Refreshing network info cache for port 55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2116.043167] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180240, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.099829] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bd7ebb-a627-34e6-3661-5a24dc914af0, 'name': SearchDatastore_Task, 'duration_secs': 0.032329} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.103402] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.103619] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 7f16b893-02e4-4395-b787-f82bc4549e4a/7f16b893-02e4-4395-b787-f82bc4549e4a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2116.104191] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47ca317b-81f2-4a80-a795-9cd9c7e4a30a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.111062] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2116.111062] env[62875]: value = "task-2180241" [ 2116.111062] env[62875]: _type = "Task" [ 2116.111062] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.120641] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180241, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.238942] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc8bdc7-21b5-4a3c-b672-c719d53bfc06 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.247769] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c211c3-a488-41e4-8a0c-0ed8d1f7d5c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.292695] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbfc666-0b33-439b-8afc-f03426f71fd9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.301894] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b392af-1f56-4b65-8496-92045282ae57 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.320522] env[62875]: DEBUG nova.compute.provider_tree [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2116.330159] env[62875]: DEBUG nova.network.neutron [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Successfully created port: 34058ec7-a269-4392-a5aa-933b163e8602 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2116.346911] env[62875]: INFO nova.compute.manager [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Rebuilding instance [ 2116.352220] env[62875]: DEBUG nova.compute.manager [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2116.352554] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2116.353578] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84579f8-981a-4139-b5a9-b388b8eac993 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.362150] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2116.362437] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f65dc3a1-516d-4a32-ba9d-79c401c0d63d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.373822] env[62875]: DEBUG oslo_vmware.api [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2116.373822] env[62875]: value = "task-2180242" [ 2116.373822] env[62875]: _type = "Task" [ 2116.373822] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.390871] env[62875]: DEBUG oslo_vmware.api [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180242, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.411546] env[62875]: DEBUG nova.compute.manager [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2116.412473] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fd6dfa-7d08-45f4-a223-bd89d2459fbc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.426375] env[62875]: DEBUG nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2116.467324] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "refresh_cache-a19f5bee-ece8-4aa3-8c33-9474da385238" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.467508] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "refresh_cache-a19f5bee-ece8-4aa3-8c33-9474da385238" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.467707] env[62875]: DEBUG nova.network.neutron [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2116.494895] env[62875]: DEBUG nova.network.neutron [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Successfully updated port: dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2116.505754] env[62875]: DEBUG nova.compute.manager [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2116.506118] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2116.507876] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe83fa46-d397-49cf-8d63-0a096405eeb8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.520664] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2116.521080] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-079a11fc-1e21-4fba-b140-33f13245c0e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.528196] env[62875]: DEBUG oslo_vmware.api [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2116.528196] env[62875]: value = "task-2180243" [ 2116.528196] env[62875]: _type = "Task" [ 2116.528196] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.540359] env[62875]: DEBUG oslo_vmware.api [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180243, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.548603] env[62875]: DEBUG oslo_vmware.api [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180240, 'name': PowerOnVM_Task, 'duration_secs': 0.604427} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.548603] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2116.548603] env[62875]: INFO nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Took 8.42 seconds to spawn the instance on the hypervisor. 
[ 2116.548603] env[62875]: DEBUG nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2116.549762] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad82493-327e-4e96-8912-832ab9ef56a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.622573] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180241, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.823994] env[62875]: DEBUG nova.scheduler.client.report [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2116.842484] env[62875]: DEBUG nova.network.neutron [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updated VIF entry in instance network info cache for port 55ce29a2-111a-4739-a7fc-ffa36ff6fa70. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2116.842850] env[62875]: DEBUG nova.network.neutron [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updating instance_info_cache with network_info: [{"id": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "address": "fa:16:3e:94:22:ba", "network": {"id": "2cff2a01-7ae2-4baf-9939-c80859b6f520", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-16382020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "770eddfb80d943c7a34a3d9a60845079", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ce29a2-11", "ovs_interfaceid": "55ce29a2-111a-4739-a7fc-ffa36ff6fa70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.883929] env[62875]: DEBUG oslo_vmware.api [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180242, 'name': PowerOffVM_Task, 'duration_secs': 0.337003} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.884223] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2116.884391] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2116.884635] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b0a66ac-2f00-49ad-a413-2dce397186b9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.988312] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2116.988616] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2116.988833] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Deleting the datastore file [datastore1] e811f624-2dda-468c-ab28-9744c300eb1d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2116.989795] env[62875]: DEBUG nova.network.neutron [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2116.991520] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8654e37c-9871-453f-a0d8-e3dce0cd6a47 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.997519] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.997661] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.997842] env[62875]: DEBUG nova.network.neutron [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2117.000575] env[62875]: DEBUG oslo_vmware.api [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for the task: (returnval){ [ 2117.000575] env[62875]: value = "task-2180245" [ 2117.000575] env[62875]: _type = "Task" [ 2117.000575] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.015683] env[62875]: DEBUG oslo_vmware.api [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180245, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.038897] env[62875]: DEBUG oslo_vmware.api [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180243, 'name': PowerOffVM_Task, 'duration_secs': 0.23104} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.038897] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2117.038897] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2117.039025] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0af182b-aad7-4759-9f71-897816cabc1c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.070250] env[62875]: INFO nova.compute.manager [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Took 31.71 seconds to build instance. [ 2117.079433] env[62875]: DEBUG nova.network.neutron [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.122662] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180241, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579256} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.123017] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 7f16b893-02e4-4395-b787-f82bc4549e4a/7f16b893-02e4-4395-b787-f82bc4549e4a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2117.123297] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2117.123635] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f18cb07-f6b9-4ef1-a60e-959bb275e4f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.131576] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2117.131576] env[62875]: value = "task-2180247" [ 2117.131576] env[62875]: _type = "Task" [ 2117.131576] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.139863] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2117.140104] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2117.140315] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Deleting the datastore file [datastore1] 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2117.143407] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15ed57f9-f5cb-4118-bb5a-0fad19435c60 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.145320] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180247, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.149700] env[62875]: DEBUG oslo_vmware.api [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2117.149700] env[62875]: value = "task-2180248" [ 2117.149700] env[62875]: _type = "Task" [ 2117.149700] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.157914] env[62875]: DEBUG oslo_vmware.api [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.328999] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.914s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.331399] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.980s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.332908] env[62875]: INFO nova.compute.claims [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2117.345389] env[62875]: DEBUG oslo_concurrency.lockutils [req-fb0c7ca8-35c0-4bb8-84a0-7cc6918a96f2 req-6035e740-8d7d-4527-9833-65f9a6a00c34 service nova] Releasing lock "refresh_cache-76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.351794] env[62875]: INFO nova.scheduler.client.report [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Deleted allocations for instance 1230e54c-701a-4406-95bd-14e32914bc8d [ 2117.425462] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2117.425755] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb9bb6ab-5d65-464f-8f51-e2924a788305 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.432736] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 
tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2117.432736] env[62875]: value = "task-2180249" [ 2117.432736] env[62875]: _type = "Task" [ 2117.432736] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.436772] env[62875]: DEBUG nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2117.444989] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.462283] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2117.462532] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2117.462715] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2117.462873] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2117.463026] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2117.463179] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2117.463384] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2117.463542] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2117.463710] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2117.463871] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2117.464106] env[62875]: DEBUG nova.virt.hardware [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2117.464926] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6880bf7-a1cb-4a04-9be8-2f311699b002 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.473549] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753b7c84-5a08-41ee-85b6-7ca627b8f93a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.515648] env[62875]: DEBUG oslo_vmware.api [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180245, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.571974] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f8f2e538-dcf2-4cc3-86c8-b0be63fa3ce5 tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 33.222s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.579498] env[62875]: DEBUG nova.network.neutron [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2117.581837] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "refresh_cache-a19f5bee-ece8-4aa3-8c33-9474da385238" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.582255] env[62875]: DEBUG nova.compute.manager [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2117.582447] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2117.583359] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a111d0-a51e-43fd-9f5b-28573d3d494b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.591144] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2117.591440] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23e32ee7-a486-4b96-86e0-889594370bbd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.600398] env[62875]: DEBUG oslo_vmware.api [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2117.600398] env[62875]: value = "task-2180250" [ 2117.600398] env[62875]: _type = "Task" [ 2117.600398] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.610778] env[62875]: DEBUG oslo_vmware.api [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.640839] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180247, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068401} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.641131] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2117.641933] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be473fb6-2b9f-42e0-b6e6-dda15906b158 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.673560] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 7f16b893-02e4-4395-b787-f82bc4549e4a/7f16b893-02e4-4395-b787-f82bc4549e4a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2117.676651] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-740f37a1-4463-4b1b-8513-a7d6a1a98d8a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.692830] env[62875]: DEBUG oslo_vmware.api [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.441289} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.693378] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2117.693676] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2117.693768] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2117.694036] env[62875]: INFO nova.compute.manager [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 2117.694189] env[62875]: DEBUG oslo.service.loopingcall [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2117.694422] env[62875]: DEBUG nova.compute.manager [-] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2117.695773] env[62875]: DEBUG nova.network.neutron [-] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2117.698221] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2117.698221] env[62875]: value = "task-2180251" [ 2117.698221] env[62875]: _type = "Task" [ 2117.698221] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.711904] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180251, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.858958] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f633f15e-a7b6-4773-ba7c-383e219b08ed tempest-ServerDiagnosticsV248Test-1192499342 tempest-ServerDiagnosticsV248Test-1192499342-project-member] Lock "1230e54c-701a-4406-95bd-14e32914bc8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.802s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.896547] env[62875]: DEBUG nova.network.neutron [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Updating instance_info_cache with network_info: [{"id": "dcbee196-268d-4405-a144-5c6eca186b1d", "address": "fa:16:3e:ef:8b:ce", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbee196-26", "ovs_interfaceid": "dcbee196-268d-4405-a144-5c6eca186b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.937789] env[62875]: DEBUG nova.network.neutron [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Successfully updated port: 34058ec7-a269-4392-a5aa-933b163e8602 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2117.947160] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180249, 'name': PowerOffVM_Task, 'duration_secs': 0.21194} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.947932] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2117.948182] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2117.949017] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7bcdfa-a31e-476c-8992-41988a5be559 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.958861] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2117.959140] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f2c7887-f53b-4788-a1ac-e9878bbd8706 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.017329] env[62875]: DEBUG oslo_vmware.api [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Task: {'id': task-2180245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.552245} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.017785] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2118.018114] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2118.018431] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2118.018807] env[62875]: INFO nova.compute.manager [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Took 1.67 seconds to destroy the instance on the hypervisor. [ 2118.023138] env[62875]: DEBUG oslo.service.loopingcall [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2118.024557] env[62875]: DEBUG nova.compute.manager [-] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2118.024557] env[62875]: DEBUG nova.network.neutron [-] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2118.042502] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2118.042739] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2118.042936] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleting the datastore file [datastore1] 44a248f6-443c-4b7c-95f0-088f0cdb924d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2118.043220] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3ddb4d1-cd82-4d2b-b965-3dafde672a35 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.050344] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2118.050344] env[62875]: value = "task-2180253" [ 2118.050344] env[62875]: _type = "Task" [ 2118.050344] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.059178] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180253, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.094149] env[62875]: DEBUG nova.compute.manager [req-001faf8b-6704-470d-91fe-6eac3056d67d req-b41cb6fa-59d4-4d44-8105-ac385941370c service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Received event network-vif-plugged-34058ec7-a269-4392-a5aa-933b163e8602 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2118.094149] env[62875]: DEBUG oslo_concurrency.lockutils [req-001faf8b-6704-470d-91fe-6eac3056d67d req-b41cb6fa-59d4-4d44-8105-ac385941370c service nova] Acquiring lock "380229e2-25ba-47cb-a6ca-167b9d9672eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.094269] env[62875]: DEBUG oslo_concurrency.lockutils [req-001faf8b-6704-470d-91fe-6eac3056d67d req-b41cb6fa-59d4-4d44-8105-ac385941370c service nova] Lock "380229e2-25ba-47cb-a6ca-167b9d9672eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.094467] env[62875]: DEBUG oslo_concurrency.lockutils [req-001faf8b-6704-470d-91fe-6eac3056d67d req-b41cb6fa-59d4-4d44-8105-ac385941370c service nova] Lock "380229e2-25ba-47cb-a6ca-167b9d9672eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.094658] env[62875]: DEBUG nova.compute.manager [req-001faf8b-6704-470d-91fe-6eac3056d67d req-b41cb6fa-59d4-4d44-8105-ac385941370c service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] No waiting events found dispatching network-vif-plugged-34058ec7-a269-4392-a5aa-933b163e8602 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2118.094845] env[62875]: WARNING nova.compute.manager [req-001faf8b-6704-470d-91fe-6eac3056d67d req-b41cb6fa-59d4-4d44-8105-ac385941370c service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Received unexpected event network-vif-plugged-34058ec7-a269-4392-a5aa-933b163e8602 for instance with vm_state building and task_state spawning. [ 2118.116553] env[62875]: DEBUG oslo_vmware.api [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180250, 'name': PowerOffVM_Task, 'duration_secs': 0.274111} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.116875] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2118.118997] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2118.118997] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48bb5f8b-9511-4910-bb37-41aab2dec32c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.144689] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2118.144960] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2118.145158] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleting the datastore file [datastore1] a19f5bee-ece8-4aa3-8c33-9474da385238 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2118.145439] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-734589e7-7337-42ac-95b3-635503c17ba3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.152286] env[62875]: DEBUG oslo_vmware.api [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2118.152286] env[62875]: value = "task-2180255" [ 2118.152286] env[62875]: _type = "Task" [ 2118.152286] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.162301] env[62875]: DEBUG oslo_vmware.api [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180255, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.170760] env[62875]: DEBUG nova.compute.manager [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Received event network-vif-plugged-dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2118.170898] env[62875]: DEBUG oslo_concurrency.lockutils [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] Acquiring lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.171128] env[62875]: DEBUG oslo_concurrency.lockutils [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.172536] env[62875]: DEBUG oslo_concurrency.lockutils [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.172831] env[62875]: DEBUG nova.compute.manager [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] No waiting events found dispatching network-vif-plugged-dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2118.173063] env[62875]: WARNING nova.compute.manager [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Received unexpected event network-vif-plugged-dcbee196-268d-4405-a144-5c6eca186b1d for instance with vm_state building and task_state spawning. [ 2118.173273] env[62875]: DEBUG nova.compute.manager [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Received event network-changed-dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2118.173496] env[62875]: DEBUG nova.compute.manager [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Refreshing instance network info cache due to event network-changed-dcbee196-268d-4405-a144-5c6eca186b1d. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2118.173728] env[62875]: DEBUG oslo_concurrency.lockutils [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] Acquiring lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.211171] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180251, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.225304] env[62875]: INFO nova.compute.manager [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Rescuing [ 2118.225492] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.225678] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquired lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.225875] env[62875]: DEBUG nova.network.neutron [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2118.399027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.399848] env[62875]: DEBUG nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Instance network_info: |[{"id": "dcbee196-268d-4405-a144-5c6eca186b1d", "address": "fa:16:3e:ef:8b:ce", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbee196-26", "ovs_interfaceid": "dcbee196-268d-4405-a144-5c6eca186b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2118.400015] env[62875]: DEBUG oslo_concurrency.lockutils [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] Acquired lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.400227] env[62875]: DEBUG nova.network.neutron [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Refreshing network info cache for port dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2118.401505] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:8b:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcbee196-268d-4405-a144-5c6eca186b1d', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2118.408851] env[62875]: DEBUG oslo.service.loopingcall [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2118.412153] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2118.412985] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2096b594-64b7-4f76-b561-4dde0db14362 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.435745] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2118.435745] env[62875]: value = "task-2180256" [ 2118.435745] env[62875]: _type = "Task" [ 2118.435745] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.447277] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-380229e2-25ba-47cb-a6ca-167b9d9672eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.447415] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-380229e2-25ba-47cb-a6ca-167b9d9672eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.447563] env[62875]: DEBUG nova.network.neutron [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2118.448533] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180256, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.560591] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161706} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.560887] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2118.561086] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2118.561266] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2118.575738] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761dbecb-9f5a-4207-9ddc-12ecb032cda2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.583414] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e681ab85-4692-4044-b22d-a9649241a51e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.616570] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-113ac732-4b95-4409-8fef-366d1eab59cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.625234] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a1c043-83ec-4d9b-8b30-b2caba816e0d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.639742] env[62875]: DEBUG nova.compute.provider_tree [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2118.661342] env[62875]: DEBUG oslo_vmware.api [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111176} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.661667] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2118.661852] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2118.662043] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2118.662217] env[62875]: INFO nova.compute.manager [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2118.662454] env[62875]: DEBUG oslo.service.loopingcall [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2118.662641] env[62875]: DEBUG nova.compute.manager [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2118.662754] env[62875]: DEBUG nova.network.neutron [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2118.680132] env[62875]: DEBUG nova.network.neutron [-] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.682065] env[62875]: DEBUG nova.network.neutron [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2118.708844] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180251, 'name': ReconfigVM_Task, 'duration_secs': 0.514721} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.709188] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 7f16b893-02e4-4395-b787-f82bc4549e4a/7f16b893-02e4-4395-b787-f82bc4549e4a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2118.709807] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8f93cb2-d76f-45d6-bde2-6f20f5315ed3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.716347] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2118.716347] env[62875]: value = "task-2180257" [ 2118.716347] env[62875]: _type = "Task" [ 2118.716347] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.725927] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180257, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.777193] env[62875]: DEBUG nova.network.neutron [-] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.952525] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180256, 'name': CreateVM_Task, 'duration_secs': 0.43852} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.956517] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2118.957758] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.957758] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.958073] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2118.958796] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fac43f3-d312-4149-a202-aa1146eed48b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.964111] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2118.964111] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5236c218-470d-2da9-e5ec-7302de759d0f" [ 2118.964111] env[62875]: _type = "Task" [ 2118.964111] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.976846] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5236c218-470d-2da9-e5ec-7302de759d0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.015913] env[62875]: DEBUG nova.network.neutron [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.021744] env[62875]: DEBUG nova.network.neutron [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2119.143190] env[62875]: DEBUG nova.scheduler.client.report [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2119.185259] env[62875]: INFO nova.compute.manager [-] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Took 1.49 seconds to deallocate network for instance. [ 2119.185259] env[62875]: DEBUG nova.network.neutron [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.230761] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180257, 'name': Rename_Task, 'duration_secs': 0.154421} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.231219] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2119.231580] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ceb92d9-ca73-4084-87db-710df3186b7b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.242213] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2119.242213] env[62875]: value = "task-2180258" [ 2119.242213] env[62875]: _type = "Task" [ 2119.242213] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.247834] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180258, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.281106] env[62875]: INFO nova.compute.manager [-] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Took 1.26 seconds to deallocate network for instance. [ 2119.341277] env[62875]: DEBUG nova.network.neutron [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Updated VIF entry in instance network info cache for port dcbee196-268d-4405-a144-5c6eca186b1d. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2119.341277] env[62875]: DEBUG nova.network.neutron [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Updating instance_info_cache with network_info: [{"id": "dcbee196-268d-4405-a144-5c6eca186b1d", "address": "fa:16:3e:ef:8b:ce", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbee196-26", "ovs_interfaceid": "dcbee196-268d-4405-a144-5c6eca186b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.355079] env[62875]: DEBUG nova.network.neutron [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Updating instance_info_cache with network_info: [{"id": "34058ec7-a269-4392-a5aa-933b163e8602", "address": "fa:16:3e:93:cb:4b", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34058ec7-a2", "ovs_interfaceid": "34058ec7-a269-4392-a5aa-933b163e8602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.474161] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5236c218-470d-2da9-e5ec-7302de759d0f, 'name': SearchDatastore_Task, 'duration_secs': 0.009774} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.474489] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.474752] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2119.474997] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.475162] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.475383] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2119.475636] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6377d14-5fb8-4cb9-9013-fde97bc83004 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.483711] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2119.483884] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2119.484603] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c77a7a1a-1201-47a8-b91c-6fc8943682ac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.489399] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2119.489399] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528cec5c-05b0-db16-66e4-68e8b854304a" [ 2119.489399] env[62875]: _type = "Task" [ 2119.489399] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.497555] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528cec5c-05b0-db16-66e4-68e8b854304a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.525270] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Releasing lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.600744] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2119.601010] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2119.601179] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2119.601365] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 
tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2119.601548] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2119.601726] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2119.601934] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2119.602103] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2119.602272] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2119.602431] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2119.602642] env[62875]: DEBUG nova.virt.hardware [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2119.603741] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d74386-4696-4301-a9f0-14554d0901f9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.611524] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f762a8c-e323-45f6-b1ff-b1d397cc0f04 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.625434] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:d0:ca:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a8a41b9-756a-4ad7-b2f3-3a05d58a308d', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2119.633509] env[62875]: DEBUG oslo.service.loopingcall [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2119.633766] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2119.633968] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cba0975-d620-44b9-a3c6-d3b96e40de2b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.648583] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.649061] env[62875]: DEBUG nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2119.652287] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.884s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.653741] env[62875]: INFO nova.compute.claims [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2119.662555] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2119.662555] env[62875]: value = "task-2180259" [ 2119.662555] env[62875]: _type = "Task" [ 2119.662555] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.670378] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180259, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.688449] env[62875]: INFO nova.compute.manager [-] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Took 1.03 seconds to deallocate network for instance. 
[ 2119.694711] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.749895] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180258, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.786696] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.843395] env[62875]: DEBUG oslo_concurrency.lockutils [req-8b413b71-827b-4786-9737-ab4ef352eb7b req-a75044d5-7853-489e-8865-b3ae090c41ad service nova] Releasing lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.857220] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-380229e2-25ba-47cb-a6ca-167b9d9672eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.857647] env[62875]: DEBUG nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Instance network_info: |[{"id": "34058ec7-a269-4392-a5aa-933b163e8602", "address": "fa:16:3e:93:cb:4b", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34058ec7-a2", "ovs_interfaceid": "34058ec7-a269-4392-a5aa-933b163e8602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2119.857999] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 
tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:cb:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34058ec7-a269-4392-a5aa-933b163e8602', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2119.865716] env[62875]: DEBUG oslo.service.loopingcall [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2119.866285] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2119.866543] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ff2afc7-8796-452d-9aff-eb1b8bfdf095 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.886758] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2119.886758] env[62875]: value = "task-2180260" [ 2119.886758] env[62875]: _type = "Task" [ 2119.886758] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.896740] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180260, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.999874] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528cec5c-05b0-db16-66e4-68e8b854304a, 'name': SearchDatastore_Task, 'duration_secs': 0.008} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.000665] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d70f8fcb-bda6-46d9-beab-73e2e9705172 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.006164] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2120.006164] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529a60eb-e3ed-7c32-5141-6c065085fba0" [ 2120.006164] env[62875]: _type = "Task" [ 2120.006164] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.014395] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529a60eb-e3ed-7c32-5141-6c065085fba0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.153844] env[62875]: DEBUG nova.compute.utils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2120.155316] env[62875]: DEBUG nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2120.155486] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2120.168692] env[62875]: DEBUG nova.compute.manager [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Received event network-changed-34058ec7-a269-4392-a5aa-933b163e8602 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2120.168846] env[62875]: DEBUG nova.compute.manager [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Refreshing instance network info cache due to event network-changed-34058ec7-a269-4392-a5aa-933b163e8602. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2120.169212] env[62875]: DEBUG oslo_concurrency.lockutils [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] Acquiring lock "refresh_cache-380229e2-25ba-47cb-a6ca-167b9d9672eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.169426] env[62875]: DEBUG oslo_concurrency.lockutils [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] Acquired lock "refresh_cache-380229e2-25ba-47cb-a6ca-167b9d9672eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.169608] env[62875]: DEBUG nova.network.neutron [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Refreshing network info cache for port 34058ec7-a269-4392-a5aa-933b163e8602 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2120.177429] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180259, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.198636] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.199805] env[62875]: DEBUG nova.compute.manager [req-71194405-7f5e-45a1-be69-6ce8f6a0298c req-f744631e-5778-4366-ab5e-a44ceca1de5d service nova] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Received event network-vif-deleted-55ce29a2-111a-4739-a7fc-ffa36ff6fa70 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2120.211292] env[62875]: DEBUG nova.policy [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae8dd9ca6c0b462aa3db7bcfae81422b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63be470870764b6ab6e803cc2a345f24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2120.250026] env[62875]: DEBUG oslo_vmware.api [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180258, 'name': PowerOnVM_Task, 'duration_secs': 0.834378} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.250323] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2120.250550] env[62875]: INFO nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Took 9.65 seconds to spawn the instance on the hypervisor. [ 2120.250752] env[62875]: DEBUG nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2120.251615] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b491d43-42c2-411d-976e-52c8181848c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.396638] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180260, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.516979] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529a60eb-e3ed-7c32-5141-6c065085fba0, 'name': SearchDatastore_Task, 'duration_secs': 0.009865} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.517263] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.517677] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 7c081de0-1952-4ca8-8f6f-80102e20bff0/7c081de0-1952-4ca8-8f6f-80102e20bff0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2120.517805] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd62b0eb-1bcb-40aa-858b-5f47c3a07648 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.524571] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2120.524571] env[62875]: value = "task-2180261" [ 2120.524571] env[62875]: _type = "Task" [ 2120.524571] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.533415] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180261, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.659403] env[62875]: DEBUG nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2120.666140] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Successfully created port: c9763427-3b9c-4a83-a0df-a284b4cf99a0 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2120.691140] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180259, 'name': CreateVM_Task, 'duration_secs': 0.923042} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.691277] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2120.693842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.693842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.693842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2120.693842] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd5977b0-28a8-419d-aca4-36b782ecf737 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.699340] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2120.699340] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dfb70c-7ffe-24c2-9048-a2d276799dcb" [ 2120.699340] env[62875]: _type = "Task" [ 2120.699340] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.711635] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dfb70c-7ffe-24c2-9048-a2d276799dcb, 'name': SearchDatastore_Task, 'duration_secs': 0.009817} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.711975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.712101] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2120.712342] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.712489] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.712673] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2120.713215] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e895f930-8025-433f-8524-821d6a1b9546 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.728102] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2120.728310] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2120.729106] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c204388-6320-417e-b5ca-24dac138ebb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.737305] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2120.737305] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52027e7b-428d-214d-0d07-a0fc36f292c4" [ 2120.737305] env[62875]: _type = "Task" [ 2120.737305] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.747556] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52027e7b-428d-214d-0d07-a0fc36f292c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.770080] env[62875]: INFO nova.compute.manager [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Took 32.76 seconds to build instance. [ 2120.901238] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180260, 'name': CreateVM_Task, 'duration_secs': 0.697479} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.904053] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2120.905037] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.905306] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.905565] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2120.905914] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40d19d30-beb4-4341-b9ca-ca2a8d72fe94 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.914207] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2120.914207] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52410fb0-6d07-6d88-c437-af3eec05596f" [ 2120.914207] env[62875]: _type = "Task" [ 2120.914207] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.930023] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52410fb0-6d07-6d88-c437-af3eec05596f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.962157] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ab9852-349d-4f72-9306-d73c1eff6eb0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.971932] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d32f21-870a-4043-ad84-7c520966b696 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.010788] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105b0ed2-3bf6-4947-8699-d9209d992224 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.019953] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138b47ab-4bae-4dfa-96be-a0d93f3a8f1b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.038802] env[62875]: DEBUG nova.compute.provider_tree [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2121.043287] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180261, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.064704] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2121.064994] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3f71114-030a-47f1-abba-c1ef87d9785c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.072414] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2121.072414] env[62875]: value = "task-2180262" [ 2121.072414] env[62875]: _type = "Task" [ 2121.072414] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.080835] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180262, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.179382] env[62875]: DEBUG nova.network.neutron [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Updated VIF entry in instance network info cache for port 34058ec7-a269-4392-a5aa-933b163e8602. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2121.179735] env[62875]: DEBUG nova.network.neutron [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Updating instance_info_cache with network_info: [{"id": "34058ec7-a269-4392-a5aa-933b163e8602", "address": "fa:16:3e:93:cb:4b", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34058ec7-a2", "ovs_interfaceid": "34058ec7-a269-4392-a5aa-933b163e8602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.249166] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52027e7b-428d-214d-0d07-a0fc36f292c4, 'name': SearchDatastore_Task, 'duration_secs': 0.063963} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.254017] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaca7c1f-a85f-45a7-8ca6-8feb06665c9d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.256724] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2121.256724] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ff7e6b-646b-4290-eff4-e9b0dad22610" [ 2121.256724] env[62875]: _type = "Task" [ 2121.256724] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.265351] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ff7e6b-646b-4290-eff4-e9b0dad22610, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.273048] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9edf6333-6d3b-48d6-a32f-47416aeaa46a tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.270s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.308311] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Successfully created port: 88706d53-4951-4a59-9a6a-324554fac125 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2121.428423] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52410fb0-6d07-6d88-c437-af3eec05596f, 'name': SearchDatastore_Task, 'duration_secs': 0.064019} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.428838] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.429036] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2121.429268] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.535613] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510865} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.535949] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 7c081de0-1952-4ca8-8f6f-80102e20bff0/7c081de0-1952-4ca8-8f6f-80102e20bff0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2121.536377] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2121.536678] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01f15fd8-0a7d-40f6-bd61-c2d068a8a7ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.543563] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2121.543563] env[62875]: value = "task-2180263" [ 2121.543563] env[62875]: _type = "Task" [ 2121.543563] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.544423] env[62875]: DEBUG nova.scheduler.client.report [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2121.556914] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180263, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.584174] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180262, 'name': PowerOffVM_Task, 'duration_secs': 0.46476} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.584450] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2121.585269] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef13b671-7034-4b27-8f8c-92d76c7c0a91 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.604425] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89442fef-06b6-4ce4-a523-de5b509992d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.641463] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2121.642226] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1781058-3bf8-4e41-b211-01269eed621d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.651745] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2121.651745] env[62875]: value = "task-2180264" [ 2121.651745] env[62875]: _type = "Task" [ 2121.651745] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.665691] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] VM already powered off {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2121.665922] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2121.666204] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.666358] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.666545] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2121.666827] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88558f8b-e2c7-4284-b068-00c508d0cde0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.674049] env[62875]: DEBUG nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2121.679528] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2121.679528] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2121.679528] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39395c2a-5d19-4d9a-9e31-de6cd4f6b0ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.683833] env[62875]: DEBUG oslo_concurrency.lockutils [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] Releasing lock "refresh_cache-380229e2-25ba-47cb-a6ca-167b9d9672eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.683833] env[62875]: DEBUG nova.compute.manager [req-0ead273b-5e8e-4d56-b096-3d3a44eed813 req-3e2e4dfa-ef61-47d7-9856-1a876c5b9928 service nova] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Received event network-vif-deleted-edc7469f-1104-497d-b8c6-1404fc3223ca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2121.686786] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2121.686786] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5246e952-170f-447d-5860-8b0e8928fb40" [ 2121.686786] env[62875]: _type = "Task" [ 2121.686786] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.698416] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5246e952-170f-447d-5860-8b0e8928fb40, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.704555] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2121.704795] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2121.704953] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2121.705154] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2121.705304] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2121.705452] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2121.705731] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2121.705892] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2121.706105] env[62875]: DEBUG nova.virt.hardware [None 
req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2121.706265] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2121.706488] env[62875]: DEBUG nova.virt.hardware [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2121.707695] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c255fb89-3f0a-4a6d-bc6f-a180880a085c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.716658] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f1c14d-d1f9-43ad-9d0b-bc6f027acece {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.766605] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ff7e6b-646b-4290-eff4-e9b0dad22610, 'name': SearchDatastore_Task, 'duration_secs': 0.010304} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.766868] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.767150] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2121.767426] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.767611] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2121.767814] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5276e905-b48a-454b-b7ec-c6e51a05f797 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.769822] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1c72f3a-23ee-4774-a8ee-4b345a987dd0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.775743] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2121.775743] env[62875]: value = "task-2180265" [ 2121.775743] env[62875]: _type = "Task" [ 2121.775743] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.779356] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2121.779525] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2121.780458] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3cd5f9d-5a3a-4c17-bfab-7c27ae20b41e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.785247] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180265, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.788384] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2121.788384] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52373f90-6830-3fcf-67e9-1a05c13ac0f0" [ 2121.788384] env[62875]: _type = "Task" [ 2121.788384] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.800579] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52373f90-6830-3fcf-67e9-1a05c13ac0f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.052824] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.053379] env[62875]: DEBUG nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2122.060248] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.910s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.060248] env[62875]: DEBUG nova.objects.instance [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lazy-loading 'resources' on Instance uuid 409b6902-f9ef-469b-a9db-4e93f764d199 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2122.060600] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180263, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190899} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.060957] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2122.062643] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7ab890-12a8-4ed3-8f08-c01a18bc2ee8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.088641] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 7c081de0-1952-4ca8-8f6f-80102e20bff0/7c081de0-1952-4ca8-8f6f-80102e20bff0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2122.089364] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4287102a-5162-4134-8d61-0b5c22b0f916 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.111647] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2122.111647] env[62875]: value = "task-2180266" [ 2122.111647] env[62875]: _type = "Task" [ 2122.111647] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.122122] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180266, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.202895] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5246e952-170f-447d-5860-8b0e8928fb40, 'name': SearchDatastore_Task, 'duration_secs': 0.009088} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.204293] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ff7da3b-d2d7-448a-ad25-23d14d19e906 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.213178] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2122.213178] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523bc997-dedc-a7ec-0016-a42e4b18aec3" [ 2122.213178] env[62875]: _type = "Task" [ 2122.213178] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.224104] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523bc997-dedc-a7ec-0016-a42e4b18aec3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.287302] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458132} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.287557] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2122.287823] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2122.288181] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b749ced-c124-461b-8841-5fceb3eceb6a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.299026] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52373f90-6830-3fcf-67e9-1a05c13ac0f0, 'name': SearchDatastore_Task, 'duration_secs': 0.009401} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.300594] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2122.300594] env[62875]: value = "task-2180267" [ 2122.300594] env[62875]: _type = "Task" [ 2122.300594] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.300791] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-726dbd0d-6702-4070-881e-ccdda1be47fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.308299] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2122.308299] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5200cea9-9557-8eb3-ac8a-1a5f0fcfadff" [ 2122.308299] env[62875]: _type = "Task" [ 2122.308299] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.318714] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5200cea9-9557-8eb3-ac8a-1a5f0fcfadff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.564757] env[62875]: DEBUG nova.compute.utils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2122.568572] env[62875]: DEBUG nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2122.568572] env[62875]: DEBUG nova.network.neutron [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2122.612040] env[62875]: DEBUG nova.policy [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e79993abf5eb47cc8449e3468d3cdd4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bef7d358bb2746efb448dbf759cac58c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2122.625221] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180266, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.727204] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523bc997-dedc-a7ec-0016-a42e4b18aec3, 'name': SearchDatastore_Task, 'duration_secs': 0.01318} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.730859] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.731686] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk. 
{{(pid=62875) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2122.731816] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c59ee99d-51a9-4613-935e-4b12c8140459 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.740859] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2122.740859] env[62875]: value = "task-2180268" [ 2122.740859] env[62875]: _type = "Task" [ 2122.740859] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.755900] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180268, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.820516] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073446} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.824388] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2122.825850] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44b52f1-0b67-4804-8c48-1c86f6fd385e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.834773] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5200cea9-9557-8eb3-ac8a-1a5f0fcfadff, 'name': SearchDatastore_Task, 'duration_secs': 0.011659} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.836657] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.837040] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 380229e2-25ba-47cb-a6ca-167b9d9672eb/380229e2-25ba-47cb-a6ca-167b9d9672eb.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2122.838131] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df49c29-117c-4d1e-b217-533eeb63d16c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.857083] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ddc4b89-f379-4e17-b095-6f0a54cb69ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.874907] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2122.875711] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3201111-dc0b-425e-83c4-23bb94f4e50c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.892397] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "7f16b893-02e4-4395-b787-f82bc4549e4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.892647] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.892923] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "7f16b893-02e4-4395-b787-f82bc4549e4a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.893044] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.893213] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.896704] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1886ab92-7e23-4353-8546-89b3264e8e90 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.899945] env[62875]: INFO nova.compute.manager [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Terminating instance [ 2122.903436] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2122.903436] env[62875]: value = "task-2180270" [ 2122.903436] env[62875]: _type = "Task" [ 2122.903436] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.903694] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2122.903694] env[62875]: value = "task-2180269" [ 2122.903694] env[62875]: _type = "Task" [ 2122.903694] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.939226] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcdd88f-a8b2-4ab1-8906-966f1934dcd6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.948350] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180269, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.948659] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180270, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.953963] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bc21a6-6a1e-4b0c-ae8b-1ac99a66e40b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.970257] env[62875]: DEBUG nova.compute.provider_tree [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2123.071032] env[62875]: DEBUG nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2123.127955] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180266, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.129100] env[62875]: DEBUG nova.network.neutron [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Successfully created port: 821254fd-eb8e-4958-8bce-51b2447b3ee9 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2123.258176] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180268, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.393038] env[62875]: DEBUG nova.compute.manager [req-f2c1a0a5-d1e4-4fd4-a097-69d10ba9e3d5 req-870c90d2-3db4-4402-b8b2-c1f674fab5b4 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received event network-vif-plugged-c9763427-3b9c-4a83-a0df-a284b4cf99a0 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2123.393038] env[62875]: DEBUG oslo_concurrency.lockutils [req-f2c1a0a5-d1e4-4fd4-a097-69d10ba9e3d5 req-870c90d2-3db4-4402-b8b2-c1f674fab5b4 service nova] Acquiring lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.393291] env[62875]: DEBUG oslo_concurrency.lockutils [req-f2c1a0a5-d1e4-4fd4-a097-69d10ba9e3d5 req-870c90d2-3db4-4402-b8b2-c1f674fab5b4 service nova] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.393330] env[62875]: DEBUG oslo_concurrency.lockutils [req-f2c1a0a5-d1e4-4fd4-a097-69d10ba9e3d5 req-870c90d2-3db4-4402-b8b2-c1f674fab5b4 service nova] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.393521] env[62875]: DEBUG nova.compute.manager [req-f2c1a0a5-d1e4-4fd4-a097-69d10ba9e3d5 req-870c90d2-3db4-4402-b8b2-c1f674fab5b4 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] No waiting events found dispatching network-vif-plugged-c9763427-3b9c-4a83-a0df-a284b4cf99a0 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2123.393690] env[62875]: WARNING nova.compute.manager [req-f2c1a0a5-d1e4-4fd4-a097-69d10ba9e3d5 req-870c90d2-3db4-4402-b8b2-c1f674fab5b4 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received unexpected event network-vif-plugged-c9763427-3b9c-4a83-a0df-a284b4cf99a0 for instance with vm_state building and task_state spawning. [ 2123.406013] env[62875]: DEBUG nova.compute.manager [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2123.406250] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2123.414245] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef46940-5958-40fe-89b7-82351b285242 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.423724] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180269, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.429609] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2123.429952] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180270, 'name': ReconfigVM_Task, 'duration_secs': 0.430188} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.430184] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e6ec20b-e7aa-4f54-a912-8502123022b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.434202] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 44a248f6-443c-4b7c-95f0-088f0cdb924d/44a248f6-443c-4b7c-95f0-088f0cdb924d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2123.434202] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5c54ece-c152-452f-a1dd-51589216c89a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.441093] env[62875]: DEBUG oslo_vmware.api [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){ [ 2123.441093] env[62875]: value = "task-2180271" [ 2123.441093] env[62875]: _type = "Task" [ 2123.441093] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.443148] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2123.443148] env[62875]: value = "task-2180272" [ 2123.443148] env[62875]: _type = "Task" [ 2123.443148] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.456185] env[62875]: DEBUG oslo_vmware.api [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180271, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.460392] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180272, 'name': Rename_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.477517] env[62875]: DEBUG nova.scheduler.client.report [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2123.604753] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Successfully updated port: c9763427-3b9c-4a83-a0df-a284b4cf99a0 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2123.624988] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180266, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.754590] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59696} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2123.754823] env[62875]: INFO nova.virt.vmwareapi.ds_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk.
[ 2123.755632] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706123ce-035e-4ca4-8007-8e732633bb5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2123.782412] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 2123.782412] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-babd41ec-80c6-42c6-8256-1c9f7e987dd3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2123.799817] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){
[ 2123.799817] env[62875]: value = "task-2180273"
[ 2123.799817] env[62875]: _type = "Task"
[ 2123.799817] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2123.807800] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180273, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2123.918986] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180269, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613862} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2123.919825] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 380229e2-25ba-47cb-a6ca-167b9d9672eb/380229e2-25ba-47cb-a6ca-167b9d9672eb.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 2123.920187] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 2123.920552] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bd8b1a8-a06e-43d4-8d99-8411d3f19e40 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2123.927787] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2123.927787] env[62875]: value = "task-2180274"
[ 2123.927787] env[62875]: _type = "Task"
[ 2123.927787] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2123.939393] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2123.961464] env[62875]: DEBUG oslo_vmware.api [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180271, 'name': PowerOffVM_Task, 'duration_secs': 0.372323} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2123.966296] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2123.966557] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2123.966934] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180272, 'name': Rename_Task, 'duration_secs': 0.155865} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2123.967241] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75bd4ad6-4b7f-4e52-bfda-5def5dc66427 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2123.969334] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2123.969656] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff3780bb-44fd-4a79-b87c-0a1b0e7b4880 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2123.976443] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2123.976443] env[62875]: value = "task-2180275"
[ 2123.976443] env[62875]: _type = "Task"
[ 2123.976443] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2123.984936] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180275, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2123.985612] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.927s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2123.988043] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.575s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2123.988043] env[62875]: DEBUG nova.objects.instance [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62875) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}}
[ 2124.009940] env[62875]: INFO nova.scheduler.client.report [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted allocations for instance 409b6902-f9ef-469b-a9db-4e93f764d199
[ 2124.088150] env[62875]: DEBUG nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2124.115419] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2124.115733] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2124.115795] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2124.115979] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2124.116147] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2124.116295] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2124.116504] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2124.116751] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2124.116845] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2124.116991] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2124.117181] env[62875]: DEBUG nova.virt.hardware [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2124.118366] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd2d245-4e07-4714-9e53-c3398c5c0db8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.131497] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180266, 'name': ReconfigVM_Task, 'duration_secs': 1.584362} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2124.133129] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f245bd-a6e4-450d-a604-6611b618bd3c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.136946] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 7c081de0-1952-4ca8-8f6f-80102e20bff0/7c081de0-1952-4ca8-8f6f-80102e20bff0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 2124.137729] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af677fb7-26d6-4216-aa7d-fb06db50cc7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.150571] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){
[ 2124.150571] env[62875]: value = "task-2180277"
[ 2124.150571] env[62875]: _type = "Task"
[ 2124.150571] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2124.159214] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180277, 'name': Rename_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2124.247536] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2124.247797] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2124.248042] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Deleting the datastore file [datastore1] 7f16b893-02e4-4395-b787-f82bc4549e4a {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2124.248707] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb6daf1f-7a0c-4a7f-b4ab-1aa8f043574a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.254263] env[62875]: DEBUG oslo_vmware.api [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for the task: (returnval){
[ 2124.254263] env[62875]: value = "task-2180278"
[ 2124.254263] env[62875]: _type = "Task"
[ 2124.254263] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2124.262142] env[62875]: DEBUG oslo_vmware.api [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2124.311543] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180273, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2124.440727] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128971} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2124.441446] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 2124.442527] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e348afb1-4417-491a-bf78-54042b184d21 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.467869] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 380229e2-25ba-47cb-a6ca-167b9d9672eb/380229e2-25ba-47cb-a6ca-167b9d9672eb.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 2124.468230] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-515e0a0f-34e7-4bb7-99f7-893b0b302196 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.498508] env[62875]: DEBUG oslo_vmware.api [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180275, 'name': PowerOnVM_Task, 'duration_secs': 0.477293} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2124.498508] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2124.498979] env[62875]: DEBUG nova.compute.manager [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2124.499410] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2124.499410] env[62875]: value = "task-2180279"
[ 2124.499410] env[62875]: _type = "Task"
[ 2124.499410] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2124.502217] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fc0f25-0508-4aa7-81a7-4332e148afee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.519009] env[62875]: DEBUG oslo_concurrency.lockutils [None req-53e58d17-91ca-4652-bcb7-57a671a1a4bb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "409b6902-f9ef-469b-a9db-4e93f764d199" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.455s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2124.521034] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2124.660364] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180277, 'name': Rename_Task, 'duration_secs': 0.1733} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2124.660798] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2124.661084] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed45ffc6-cb48-4032-beb1-3ccda9b7083b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.667133] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){
[ 2124.667133] env[62875]: value = "task-2180280"
[ 2124.667133] env[62875]: _type = "Task"
[ 2124.667133] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2124.674839] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180280, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2124.763996] env[62875]: DEBUG oslo_vmware.api [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Task: {'id': task-2180278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133878} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2124.764476] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2124.764476] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2124.764714] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2124.764890] env[62875]: INFO nova.compute.manager [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Took 1.36 seconds to destroy the instance on the hypervisor.
[ 2124.765143] env[62875]: DEBUG oslo.service.loopingcall [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2124.765331] env[62875]: DEBUG nova.compute.manager [-] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2124.765426] env[62875]: DEBUG nova.network.neutron [-] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2124.812276] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180273, 'name': ReconfigVM_Task, 'duration_secs': 0.599091} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2124.812625] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 2124.813572] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd42f3ef-9afb-4df0-9da1-1333c91fe047 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.847191] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6229f6c9-fb89-46c4-ab7a-6f1803fa4f81 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2124.864303] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){
[ 2124.864303] env[62875]: value = "task-2180281"
[ 2124.864303] env[62875]: _type = "Task"
[ 2124.864303] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2124.873058] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180281, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2125.003444] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2218973e-9ab0-45f1-ad64-edede0978009 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2125.003444] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.308s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2125.003821] env[62875]: DEBUG nova.objects.instance [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lazy-loading 'resources' on Instance uuid 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2125.019686] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2125.032200] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2125.077169] env[62875]: DEBUG nova.compute.manager [req-51b11539-6a0e-47ff-92d0-954527f36080 req-9fd0943d-e4ee-450f-bf93-4fcfdd7fa8cf service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Received event network-vif-deleted-db4ba514-bcb8-4bac-a3dc-400ffde442b7 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2125.078199] env[62875]: INFO nova.compute.manager [req-51b11539-6a0e-47ff-92d0-954527f36080 req-9fd0943d-e4ee-450f-bf93-4fcfdd7fa8cf service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Neutron deleted interface db4ba514-bcb8-4bac-a3dc-400ffde442b7; detaching it from the instance and deleting it from the info cache
[ 2125.078741] env[62875]: DEBUG nova.network.neutron [req-51b11539-6a0e-47ff-92d0-954527f36080 req-9fd0943d-e4ee-450f-bf93-4fcfdd7fa8cf service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2125.140107] env[62875]: DEBUG nova.network.neutron [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Successfully updated port: 821254fd-eb8e-4958-8bce-51b2447b3ee9 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2125.177339] env[62875]: DEBUG oslo_vmware.api [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180280, 'name': PowerOnVM_Task, 'duration_secs': 0.437905} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2125.177864] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2125.178038] env[62875]: INFO nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Took 10.21 seconds to spawn the instance on the hypervisor.
[ 2125.178232] env[62875]: DEBUG nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2125.178994] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b405c9-44ef-417f-917e-530e559a1ff6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.375264] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180281, 'name': ReconfigVM_Task, 'duration_secs': 0.158113} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2125.375576] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2125.375825] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aebcb903-6d7a-4810-ad22-8b09d7a21fda {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.382836] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){
[ 2125.382836] env[62875]: value = "task-2180282"
[ 2125.382836] env[62875]: _type = "Task"
[ 2125.382836] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2125.391129] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180282, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2125.488523] env[62875]: DEBUG nova.compute.manager [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received event network-changed-c9763427-3b9c-4a83-a0df-a284b4cf99a0 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2125.488700] env[62875]: DEBUG nova.compute.manager [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Refreshing instance network info cache due to event network-changed-c9763427-3b9c-4a83-a0df-a284b4cf99a0. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2125.488932] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Acquiring lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2125.489092] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Acquired lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2125.489257] env[62875]: DEBUG nova.network.neutron [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Refreshing network info cache for port c9763427-3b9c-4a83-a0df-a284b4cf99a0 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2125.517381] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180279, 'name': ReconfigVM_Task, 'duration_secs': 0.741596} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2125.517849] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 380229e2-25ba-47cb-a6ca-167b9d9672eb/380229e2-25ba-47cb-a6ca-167b9d9672eb.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 2125.518469] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1923e53-72db-4139-859e-2cbe248d1399 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.524623] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2125.524623] env[62875]: value = "task-2180283"
[ 2125.524623] env[62875]: _type = "Task"
[ 2125.524623] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2125.533404] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180283, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2125.553410] env[62875]: DEBUG nova.network.neutron [-] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2125.581313] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5e1cace-fccb-4720-88b6-46cfd77ae8da {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.594024] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dd7a12-e3a0-4e8a-9f66-b811c5967ad1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.634074] env[62875]: DEBUG nova.compute.manager [req-51b11539-6a0e-47ff-92d0-954527f36080 req-9fd0943d-e4ee-450f-bf93-4fcfdd7fa8cf service nova] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Detach interface failed, port_id=db4ba514-bcb8-4bac-a3dc-400ffde442b7, reason: Instance 7f16b893-02e4-4395-b787-f82bc4549e4a could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2125.642790] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "refresh_cache-346f4371-3029-4710-9163-08cf36196207" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2125.643040] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "refresh_cache-346f4371-3029-4710-9163-08cf36196207" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2125.643180] env[62875]: DEBUG nova.network.neutron [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2125.704540] env[62875]: INFO nova.compute.manager [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Took 30.10 seconds to build instance.
[ 2125.780533] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d80f5dc-22a3-444d-9d9c-78122ade14cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.789297] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2313bfa0-f071-4d7f-ad90-c6732b816b67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.823664] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097f6cd3-3078-494a-90ab-60e4e765f551 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.832181] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1a9a7d-6433-44bc-a354-22ff03e51220 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2125.845770] env[62875]: DEBUG nova.compute.provider_tree [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2125.897913] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180282, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2125.972694] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Successfully updated port: 88706d53-4951-4a59-9a6a-324554fac125 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2126.038580] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180283, 'name': Rename_Task, 'duration_secs': 0.264735} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2126.039126] env[62875]: DEBUG nova.network.neutron [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2126.041058] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2126.041325] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f7807d0-3039-4a29-85ca-d89589a2b2e9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2126.048970] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2126.048970] env[62875]: value = "task-2180284"
[ 2126.048970] env[62875]: _type = "Task"
[ 2126.048970] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2126.057894] env[62875]: INFO nova.compute.manager [-] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Took 1.29 seconds to deallocate network for instance.
[ 2126.057894] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180284, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2126.207261] env[62875]: DEBUG nova.network.neutron [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2126.210153] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d40fbc35-b153-4848-a944-6f9664a5a697 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 31.609s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2126.214189] env[62875]: DEBUG nova.network.neutron [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2126.350210] env[62875]: DEBUG nova.scheduler.client.report [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2126.392887] env[62875]: DEBUG oslo_vmware.api [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180282, 'name': PowerOnVM_Task, 'duration_secs': 0.873541} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2126.393669] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2126.397062] env[62875]: DEBUG nova.compute.manager [None req-6a0474ee-23f6-4462-a515-4dbc04b7a7fc tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2126.400016] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772e5d28-f71a-4e36-9417-04d7a149957e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2126.425893] env[62875]: DEBUG nova.network.neutron [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Updating instance_info_cache with network_info: [{"id": "821254fd-eb8e-4958-8bce-51b2447b3ee9", "address": "fa:16:3e:9c:49:b7", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap821254fd-eb", "ovs_interfaceid": "821254fd-eb8e-4958-8bce-51b2447b3ee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2126.478725] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2126.479165] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "44a248f6-443c-4b7c-95f0-088f0cdb924d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2126.479377] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2126.479578] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "44a248f6-443c-4b7c-95f0-088f0cdb924d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2126.479771] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2126.479940] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2126.482192] env[62875]: INFO nova.compute.manager [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Terminating instance
[ 2126.563600] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180284, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2126.564590] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2126.718674] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Releasing lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2126.718976] env[62875]: DEBUG nova.compute.manager [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Received event network-vif-plugged-821254fd-eb8e-4958-8bce-51b2447b3ee9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2126.719202] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Acquiring lock "346f4371-3029-4710-9163-08cf36196207-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2126.719416] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Lock "346f4371-3029-4710-9163-08cf36196207-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2126.719584] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Lock "346f4371-3029-4710-9163-08cf36196207-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2126.719757] env[62875]: DEBUG nova.compute.manager [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] No waiting events found dispatching network-vif-plugged-821254fd-eb8e-4958-8bce-51b2447b3ee9 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2126.719955] env[62875]: WARNING nova.compute.manager [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Received unexpected event network-vif-plugged-821254fd-eb8e-4958-8bce-51b2447b3ee9 for instance with vm_state building and task_state spawning.
[ 2126.720163] env[62875]: DEBUG nova.compute.manager [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Received event network-changed-821254fd-eb8e-4958-8bce-51b2447b3ee9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2126.721498] env[62875]: DEBUG nova.compute.manager [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Refreshing instance network info cache due to event network-changed-821254fd-eb8e-4958-8bce-51b2447b3ee9. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2126.721498] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Acquiring lock "refresh_cache-346f4371-3029-4710-9163-08cf36196207" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2126.721498] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2126.721498] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2126.855748] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.853s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2126.858227] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.072s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2126.858892] env[62875]: DEBUG nova.objects.instance [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lazy-loading 'resources' on Instance uuid e811f624-2dda-468c-ab28-9744c300eb1d {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2126.871919] env[62875]: INFO nova.scheduler.client.report [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Deleted allocations for instance 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286
[ 2126.928323] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "refresh_cache-346f4371-3029-4710-9163-08cf36196207" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2126.928706] env[62875]: DEBUG nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Instance network_info: |[{"id": "821254fd-eb8e-4958-8bce-51b2447b3ee9", "address": "fa:16:3e:9c:49:b7", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap821254fd-eb", "ovs_interfaceid": "821254fd-eb8e-4958-8bce-51b2447b3ee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2126.929289] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Acquired lock "refresh_cache-346f4371-3029-4710-9163-08cf36196207" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2126.929477] env[62875]: DEBUG nova.network.neutron [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Refreshing network info cache for port 821254fd-eb8e-4958-8bce-51b2447b3ee9 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2126.931577] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:49:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '821254fd-eb8e-4958-8bce-51b2447b3ee9', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2126.940748] env[62875]: DEBUG oslo.service.loopingcall [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2126.944759] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 346f4371-3029-4710-9163-08cf36196207] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2126.945334] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-220707e6-e478-4f23-adb6-6614136c06e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2126.967670] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2126.967670] env[62875]: value = "task-2180285"
[ 2126.967670] env[62875]: _type = "Task"
[ 2126.967670] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2126.976314] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180285, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2126.986357] env[62875]: DEBUG nova.compute.manager [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2126.986679] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2126.987518] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a9483f-6e5d-4ca9-83bb-ace93d421cec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2126.996878] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 2126.997190] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dea9f5b9-4e2e-4bdc-b7dd-8eea90b86daa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2127.003842] env[62875]: DEBUG oslo_vmware.api [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2127.003842] env[62875]: value = "task-2180286"
[ 2127.003842] env[62875]: _type = "Task"
[ 2127.003842] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2127.015262] env[62875]: DEBUG oslo_vmware.api [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180286, 'name': PowerOffVM_Task} progress is 0%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.055132] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.055858] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.055858] env[62875]: INFO nova.compute.manager [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Shelving [ 2127.063184] env[62875]: DEBUG oslo_vmware.api [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180284, 'name': PowerOnVM_Task, 'duration_secs': 0.955354} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.064186] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2127.064186] env[62875]: INFO nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Took 9.63 seconds to spawn the instance on the hypervisor.
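Every "Task: {'id': task-..., ...} progress is N%." / "completed successfully." pair in this log is oslo.vmware polling a vCenter task object on behalf of the driver. A minimal sketch of that call pattern, assuming placeholder endpoint, credentials, and a vm_ref managed-object reference (VMwareAPISession, invoke_api, and wait_for_task are the real oslo.vmware entry points; everything else is assumed):

    from oslo_vmware import api

    # Placeholder connection values; the 0.5s poll interval is an assumption.
    session = api.VMwareAPISession('vc.example.test', 'admin', 's3cret',
                                   10, 0.5)
    vm_ref = ...  # placeholder: a VirtualMachine managed-object reference

    # invoke_api issues the SOAP call (the "Invoking ...VM_Task with
    # opID=oslo.vmware-..." lines above) and returns a Task reference...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ...which wait_for_task polls (the "progress is N%" lines) until it
    # reaches success, returning the TaskInfo, or raising on failure.
    task_info = session.wait_for_task(task)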
[ 2127.064186] env[62875]: DEBUG nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2127.064818] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab1295f-4511-43bf-8bae-2b4f471e1376 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.228799] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.229163] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.385469] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a8faac8b-5ba1-4fe6-bca7-9721a026cb42 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "76a058aa-9fdf-4a3d-9d1b-a50bb9f61286" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.396s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.477576] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2127.488518] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180285, 'name': CreateVM_Task, 'duration_secs': 0.460424} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.494094] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 346f4371-3029-4710-9163-08cf36196207] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2127.494094] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2127.494094] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2127.494094] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2127.494483] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3d35df4-eadf-46c1-b1ed-9d9e3aa2aab3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.500295] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2127.500295] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52809300-00be-6445-da8f-3e0ab16e1b42" [ 2127.500295] env[62875]: _type = "Task" [ 2127.500295] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.520950] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52809300-00be-6445-da8f-3e0ab16e1b42, 'name': SearchDatastore_Task, 'duration_secs': 0.008804} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.533846] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.533846] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2127.533846] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2127.533846] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2127.533846] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2127.533846] env[62875]: DEBUG oslo_vmware.api [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180286, 'name': PowerOffVM_Task, 'duration_secs': 0.250314} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.534494] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17943455-7b70-484c-b227-aae41ec2077d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.536850] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2127.537108] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2127.537765] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c6fae3c-932b-4b90-9667-d24ff0c6838c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.549795] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2127.550077] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2127.551137] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac52b751-3ce9-4bbd-95b5-97be135e6415 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.574373] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2127.574373] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f793f-d199-0523-2736-f0b77997b5ea" [ 2127.574373] env[62875]: _type = "Task" [ 2127.574373] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.585274] env[62875]: INFO nova.compute.manager [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Took 29.32 seconds to build instance. [ 2127.603386] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f793f-d199-0523-2736-f0b77997b5ea, 'name': SearchDatastore_Task, 'duration_secs': 0.024488} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.605756] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c23e5793-dce9-484f-ab7c-57299c49639f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.620066] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2127.620066] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520a9fb4-76c3-60a2-e75b-3f1dfe1cd24b" [ 2127.620066] env[62875]: _type = "Task" [ 2127.620066] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.621694] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2127.621972] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2127.622258] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleting the datastore file [datastore2] 44a248f6-443c-4b7c-95f0-088f0cdb924d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2127.627360] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dac51a7c-9a12-44a7-bbd8-c7c271e76930 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.639778] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520a9fb4-76c3-60a2-e75b-3f1dfe1cd24b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.642658] env[62875]: DEBUG oslo_vmware.api [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2127.642658] env[62875]: value = "task-2180288" [ 2127.642658] env[62875]: _type = "Task" [ 2127.642658] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.658616] env[62875]: DEBUG oslo_vmware.api [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180288, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.674188] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a52471-e7dd-4af0-88e2-4fa3a5478eab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.684843] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec2fe0d-dc0e-4f9b-abf5-db428590f078 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.693099] env[62875]: DEBUG nova.network.neutron [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Updated VIF entry in instance network info cache for port 821254fd-eb8e-4958-8bce-51b2447b3ee9. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2127.693732] env[62875]: DEBUG nova.network.neutron [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Updating instance_info_cache with network_info: [{"id": "821254fd-eb8e-4958-8bce-51b2447b3ee9", "address": "fa:16:3e:9c:49:b7", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap821254fd-eb", "ovs_interfaceid": "821254fd-eb8e-4958-8bce-51b2447b3ee9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.742562] env[62875]: DEBUG nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2127.753594] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98887b2e-3961-458a-999d-daa7a61c805a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.766593] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac921eec-783c-4969-b208-b04828112355 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.790415] env[62875]: DEBUG nova.compute.provider_tree [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2127.925207] env[62875]: DEBUG nova.compute.manager [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received event network-vif-plugged-88706d53-4951-4a59-9a6a-324554fac125 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2127.925436] env[62875]: DEBUG oslo_concurrency.lockutils [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] Acquiring lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.925636] env[62875]: DEBUG oslo_concurrency.lockutils [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.925811] env[62875]: DEBUG oslo_concurrency.lockutils [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.925960] env[62875]: DEBUG nova.compute.manager [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] No waiting events found dispatching network-vif-plugged-88706d53-4951-4a59-9a6a-324554fac125 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2127.926560] env[62875]: WARNING nova.compute.manager [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received unexpected event network-vif-plugged-88706d53-4951-4a59-9a6a-324554fac125 for instance with vm_state building and task_state spawning.
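The "Waiting for function ... to return." lines at loopingcall.py:435 elsewhere in this log (around the CreateVM_Task call above and the network deallocation below) are emitted by oslo.service's RetryDecorator, which re-runs a wrapped function through a DynamicLoopingCall when it raises a designated exception. A minimal sketch of applying it; the retry count, back-off, and exception type here are illustrative assumptions, not Nova's actual policy:

    from oslo_service import loopingcall

    class TransientVimError(Exception):
        """Illustrative stand-in for a retryable vSphere fault."""

    # Retry up to 3 times, adding 1s of sleep per attempt, capped at 10s;
    # any exception not listed in `exceptions` propagates immediately.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10,
                                exceptions=(TransientVimError,))
    def create_vm():
        pass  # placeholder; the real nova.virt.vmwareapi.vm_util.create_vm
              # invokes Folder.CreateVM_Task and waits on the returned task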
[ 2127.926560] env[62875]: DEBUG nova.compute.manager [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received event network-changed-88706d53-4951-4a59-9a6a-324554fac125 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2127.926821] env[62875]: DEBUG nova.compute.manager [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Refreshing instance network info cache due to event network-changed-88706d53-4951-4a59-9a6a-324554fac125. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2127.926879] env[62875]: DEBUG oslo_concurrency.lockutils [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] Acquiring lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2128.066932] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2128.067260] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2cf2900-8c42-4a0b-8473-0753be6d7ff8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.076671] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2128.076671] env[62875]: value = "task-2180289" [ 2128.076671] env[62875]: _type = "Task" [ 2128.076671] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.086413] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180289, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.096180] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3a95be8e-4dd3-449c-8019-5b46457f27ec tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "380229e2-25ba-47cb-a6ca-167b9d9672eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 30.835s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.134375] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520a9fb4-76c3-60a2-e75b-3f1dfe1cd24b, 'name': SearchDatastore_Task, 'duration_secs': 0.023586} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.134935] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.135236] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 346f4371-3029-4710-9163-08cf36196207/346f4371-3029-4710-9163-08cf36196207.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2128.135507] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80ab53b8-b028-4f2e-8010-db80d35ca800 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.147800] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2128.147800] env[62875]: value = "task-2180290" [ 2128.147800] env[62875]: _type = "Task" [ 2128.147800] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.153997] env[62875]: DEBUG oslo_vmware.api [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193976} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.154565] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2128.154774] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2128.155266] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2128.155266] env[62875]: INFO nova.compute.manager [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2128.155401] env[62875]: DEBUG oslo.service.loopingcall [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2128.155563] env[62875]: DEBUG nova.compute.manager [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2128.155673] env[62875]: DEBUG nova.network.neutron [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2128.160026] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180290, 'name': CopyVirtualDisk_Task} progress is 0%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.248184] env[62875]: DEBUG oslo_concurrency.lockutils [req-ffa8825b-488a-4581-966b-6a687cb32047 req-4c5851b3-683d-43d3-9c0d-33c51a97a349 service nova] Releasing lock "refresh_cache-346f4371-3029-4710-9163-08cf36196207" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.260476] env[62875]: DEBUG nova.network.neutron [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Updating instance_info_cache with network_info: [{"id": "c9763427-3b9c-4a83-a0df-a284b4cf99a0", "address": "fa:16:3e:34:28:af", "network": {"id": "9c7fa6c3-5428-4cc4-871e-61bceaeaf740", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-868617777", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9763427-3b", "ovs_interfaceid": "c9763427-3b9c-4a83-a0df-a284b4cf99a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "88706d53-4951-4a59-9a6a-324554fac125", "address": "fa:16:3e:c2:6a:cd", "network": {"id": "65ac5479-1301-4af5-8c4d-e8271afb5440", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1628466981", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88706d53-49", "ovs_interfaceid": "88706d53-4951-4a59-9a6a-324554fac125", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2128.282214] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.294170] env[62875]: DEBUG nova.scheduler.client.report [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2128.592189] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180289, 'name': PowerOffVM_Task, 'duration_secs': 0.222737} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.592664] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2128.593649] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c358d83a-ae57-496a-9e7d-1a09dd2bd135 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.618023] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd45e1d-b677-44bb-b1a8-215fe67c3325 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.657292] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180290, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492413} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.657686] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 346f4371-3029-4710-9163-08cf36196207/346f4371-3029-4710-9163-08cf36196207.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2128.657819] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2128.658009] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e13c80c3-1513-4c10-8031-458fea645097 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.664720] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2128.664720] env[62875]: value = "task-2180291" [ 2128.664720] env[62875]: _type = "Task" [ 2128.664720] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.672881] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180291, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.763613] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Releasing lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.763953] env[62875]: DEBUG nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Instance network_info: |[{"id": "c9763427-3b9c-4a83-a0df-a284b4cf99a0", "address": "fa:16:3e:34:28:af", "network": {"id": "9c7fa6c3-5428-4cc4-871e-61bceaeaf740", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-868617777", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9763427-3b", "ovs_interfaceid": "c9763427-3b9c-4a83-a0df-a284b4cf99a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "88706d53-4951-4a59-9a6a-324554fac125", "address": "fa:16:3e:c2:6a:cd", "network": {"id": "65ac5479-1301-4af5-8c4d-e8271afb5440", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1628466981", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88706d53-49", "ovs_interfaceid": "88706d53-4951-4a59-9a6a-324554fac125", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2128.764278] env[62875]: DEBUG oslo_concurrency.lockutils [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] Acquired lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2128.764461] env[62875]: DEBUG nova.network.neutron 
[req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Refreshing network info cache for port 88706d53-4951-4a59-9a6a-324554fac125 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2128.765603] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:28:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9763427-3b9c-4a83-a0df-a284b4cf99a0', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:6a:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '88706d53-4951-4a59-9a6a-324554fac125', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2128.775860] env[62875]: DEBUG oslo.service.loopingcall [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2128.776982] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2128.777283] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e3af852-1c86-4117-a92a-289457dfe585 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.799300] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.941s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.803055] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.605s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.803521] env[62875]: DEBUG nova.objects.instance [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lazy-loading 'resources' on Instance uuid a19f5bee-ece8-4aa3-8c33-9474da385238 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2128.804490] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2128.804490] env[62875]: value = "task-2180292" [ 2128.804490] env[62875]: _type = "Task" [ 2128.804490] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.813947] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180292, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.821538] env[62875]: INFO nova.scheduler.client.report [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Deleted allocations for instance e811f624-2dda-468c-ab28-9744c300eb1d [ 2129.129906] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2129.130252] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7f5e9660-a362-4145-9b87-8872eca994ca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.137841] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2129.137841] env[62875]: value = "task-2180293" [ 2129.137841] env[62875]: _type = "Task" [ 2129.137841] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.146409] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180293, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.177019] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082356} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.177019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2129.177019] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1f918e-050c-4b82-9c64-0e2e0dba77b0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.202991] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 346f4371-3029-4710-9163-08cf36196207/346f4371-3029-4710-9163-08cf36196207.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2129.203502] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbd7aced-9c32-4dbe-bbc0-77b5a485c3e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.227880] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2129.227880] env[62875]: value = "task-2180294" [ 2129.227880] env[62875]: _type = "Task" [ 2129.227880] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.234859] env[62875]: DEBUG nova.network.neutron [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.235609] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180294, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.317768] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180292, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.328925] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bc8c2fd4-0c17-4755-98f0-9024b9882e1d tempest-ImagesNegativeTestJSON-488942989 tempest-ImagesNegativeTestJSON-488942989-project-member] Lock "e811f624-2dda-468c-ab28-9744c300eb1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.497s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.534658] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1a844d-3116-4865-95ab-4e0d2b5b906d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.543694] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34901e54-b4b2-49f3-b621-758a46014f3b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.576384] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed533af8-df31-4fdc-b470-cedf34db129d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.586838] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c52c6f-0c77-4604-a126-33624342f185 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.605144] env[62875]: DEBUG nova.compute.provider_tree [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2129.647317] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180293, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.737299] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180294, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.738851] env[62875]: INFO nova.compute.manager [-] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Took 1.58 seconds to deallocate network for instance. 
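The CreateVM_Task, CreateSnapshot_Task and ReconfigVM_Task entries above all follow oslo.vmware's standard wait loop: invoke the SOAP method, get back a task reference, then poll it until it reaches SUCCESS. A minimal sketch of that pattern follows; it is not Nova's own code, and the endpoint, credentials and vm_ref are placeholders, not values from this log:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; task_poll_interval controls how
    # often _poll_task runs (each poll emits a "progress is N%" DEBUG line
    # like the ones above).
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # invoke_api issues the SOAP call and returns a task reference,
        # i.e. the "task-21802xx" values seen in the log.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task blocks, re-reading the task state until it reaches
        # SUCCESS (logged as "completed successfully" above) or raises on
        # task error.
        return session.wait_for_task(task)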
[ 2129.761908] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "380229e2-25ba-47cb-a6ca-167b9d9672eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.762215] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "380229e2-25ba-47cb-a6ca-167b9d9672eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.762441] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "380229e2-25ba-47cb-a6ca-167b9d9672eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.762640] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "380229e2-25ba-47cb-a6ca-167b9d9672eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.762817] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "380229e2-25ba-47cb-a6ca-167b9d9672eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.764913] env[62875]: INFO nova.compute.manager [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Terminating instance [ 2129.800509] env[62875]: DEBUG nova.network.neutron [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Updated VIF entry in instance network info cache for port 88706d53-4951-4a59-9a6a-324554fac125. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2129.800989] env[62875]: DEBUG nova.network.neutron [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Updating instance_info_cache with network_info: [{"id": "c9763427-3b9c-4a83-a0df-a284b4cf99a0", "address": "fa:16:3e:34:28:af", "network": {"id": "9c7fa6c3-5428-4cc4-871e-61bceaeaf740", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-868617777", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9763427-3b", "ovs_interfaceid": "c9763427-3b9c-4a83-a0df-a284b4cf99a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "88706d53-4951-4a59-9a6a-324554fac125", "address": "fa:16:3e:c2:6a:cd", "network": {"id": "65ac5479-1301-4af5-8c4d-e8271afb5440", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1628466981", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88706d53-49", "ovs_interfaceid": "88706d53-4951-4a59-9a6a-324554fac125", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.819835] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180292, 'name': CreateVM_Task, 'duration_secs': 0.943487} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.820028] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2129.821070] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2129.821070] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2129.821404] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2129.821665] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-348017c1-4b8d-4623-8382-01e9813f9825 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.827369] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2129.827369] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5223b131-181f-da13-d475-1ea8e28b795c" [ 2129.827369] env[62875]: _type = "Task" [ 2129.827369] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.836812] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5223b131-181f-da13-d475-1ea8e28b795c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.108773] env[62875]: DEBUG nova.scheduler.client.report [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2130.135788] env[62875]: DEBUG nova.compute.manager [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2130.135788] env[62875]: DEBUG nova.compute.manager [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing instance network info cache due to event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2130.135788] env[62875]: DEBUG oslo_concurrency.lockutils [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] Acquiring lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.135907] env[62875]: DEBUG oslo_concurrency.lockutils [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] Acquired lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.136096] env[62875]: DEBUG nova.network.neutron [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2130.148671] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180293, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.238796] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180294, 'name': ReconfigVM_Task, 'duration_secs': 0.675336} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.239112] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 346f4371-3029-4710-9163-08cf36196207/346f4371-3029-4710-9163-08cf36196207.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2130.239747] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff2a5cee-b407-4271-be8c-dbdb89f7c05c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.245557] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.246939] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2130.246939] env[62875]: value = "task-2180295" [ 2130.246939] env[62875]: _type = "Task" [ 2130.246939] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.255548] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180295, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.269408] env[62875]: DEBUG nova.compute.manager [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2130.269634] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2130.271188] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b00d27-38e5-4b93-a7ab-54b4603989d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.279152] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2130.279465] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfd0ff76-c98a-4d14-a3ba-5d19c17e9275 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.286437] env[62875]: DEBUG oslo_vmware.api [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2130.286437] env[62875]: value = "task-2180296" [ 2130.286437] env[62875]: _type = "Task" [ 2130.286437] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.294662] env[62875]: DEBUG oslo_vmware.api [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180296, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.303449] env[62875]: DEBUG oslo_concurrency.lockutils [req-bf0ee726-c511-4e33-8c4a-411a649c7cd2 req-cd65877c-bc17-41fb-ad44-3f07382731aa service nova] Releasing lock "refresh_cache-9dd30ca8-bf15-4a87-b055-3575445f4b79" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.337535] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5223b131-181f-da13-d475-1ea8e28b795c, 'name': SearchDatastore_Task, 'duration_secs': 0.015729} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.337908] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.338136] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2130.338383] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.338623] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.338873] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2130.339233] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48457a01-1c99-4576-9825-3eecdaa4121e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.349461] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2130.349790] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2130.350731] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dfd7aaa-7ebb-48a0-8138-7d6d98748e13 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.358812] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2130.358812] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521dbc60-ebf0-94de-7614-ae4194cbb6d4" [ 2130.358812] env[62875]: _type = "Task" [ 2130.358812] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.366905] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521dbc60-ebf0-94de-7614-ae4194cbb6d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.615276] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.617587] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.586s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.617781] env[62875]: DEBUG nova.objects.instance [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62875) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2130.639105] env[62875]: INFO nova.scheduler.client.report [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleted allocations for instance a19f5bee-ece8-4aa3-8c33-9474da385238 [ 2130.665095] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180293, 'name': CreateSnapshot_Task, 'duration_secs': 1.28856} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.666589] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2130.667757] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56527df4-faa3-4981-8983-3645ede576d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.758676] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180295, 'name': Rename_Task, 'duration_secs': 0.181354} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.759364] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2130.760724] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31d8c3a2-181d-447f-954f-39806e183d9d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.769021] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2130.769021] env[62875]: value = "task-2180297" [ 2130.769021] env[62875]: _type = "Task" [ 2130.769021] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.774698] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180297, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.795578] env[62875]: DEBUG oslo_vmware.api [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180296, 'name': PowerOffVM_Task, 'duration_secs': 0.196294} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.796317] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2130.796834] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2130.797727] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1f5df60-06e0-4b82-ab6a-20bf88d1e69a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.873444] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521dbc60-ebf0-94de-7614-ae4194cbb6d4, 'name': SearchDatastore_Task, 'duration_secs': 0.009273} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.873444] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac2c190c-50a8-4267-b506-b981e79595f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.876871] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2130.876871] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a9dc43-b5ad-0f16-3abb-53f69a9e4b44" [ 2130.876871] env[62875]: _type = "Task" [ 2130.876871] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.885683] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a9dc43-b5ad-0f16-3abb-53f69a9e4b44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.911298] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2130.911298] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2130.911298] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleting the datastore file [datastore2] 380229e2-25ba-47cb-a6ca-167b9d9672eb {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2130.911298] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb6bca8a-c25b-403f-a9ad-d2f993834de1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.925670] env[62875]: DEBUG oslo_vmware.api [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2130.925670] env[62875]: value = "task-2180299" [ 2130.925670] env[62875]: _type = "Task" [ 2130.925670] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.938409] env[62875]: DEBUG oslo_vmware.api [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180299, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.065303] env[62875]: DEBUG nova.network.neutron [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updated VIF entry in instance network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2131.065682] env[62875]: DEBUG nova.network.neutron [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.157312] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cf25863f-d664-46f2-b41d-7de34846bc20 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "a19f5bee-ece8-4aa3-8c33-9474da385238" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.199s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.187725] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2131.188463] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6eb8702a-35c0-480b-8e14-cb9740d2ca67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.200143] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2131.200143] env[62875]: value = "task-2180300" [ 2131.200143] env[62875]: _type = "Task" [ 2131.200143] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.212462] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180300, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.279496] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180297, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.391178] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a9dc43-b5ad-0f16-3abb-53f69a9e4b44, 'name': SearchDatastore_Task, 'duration_secs': 0.010062} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.391178] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2131.391178] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 9dd30ca8-bf15-4a87-b055-3575445f4b79/9dd30ca8-bf15-4a87-b055-3575445f4b79.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2131.391790] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5efa245d-65ff-4052-87ba-e70c0c0e43b3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.399474] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2131.399474] env[62875]: value = "task-2180301" [ 2131.399474] env[62875]: _type = "Task" [ 2131.399474] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.411376] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180301, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.438121] env[62875]: DEBUG oslo_vmware.api [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176431} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.438121] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2131.438121] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2131.438121] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2131.438121] env[62875]: INFO nova.compute.manager [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2131.438121] env[62875]: DEBUG oslo.service.loopingcall [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2131.438432] env[62875]: DEBUG nova.compute.manager [-] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2131.438432] env[62875]: DEBUG nova.network.neutron [-] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2131.503771] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "d0c4095f-2d78-4055-b568-7e70e7c4c182" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.503965] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.504764] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "d0c4095f-2d78-4055-b568-7e70e7c4c182-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.504990] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.505203] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.507878] env[62875]: INFO nova.compute.manager [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Terminating instance [ 2131.571337] env[62875]: DEBUG oslo_concurrency.lockutils [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] Releasing lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2131.571337] env[62875]: DEBUG nova.compute.manager [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Received event network-vif-deleted-2a8a41b9-756a-4ad7-b2f3-3a05d58a308d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2131.571337] env[62875]: DEBUG nova.compute.manager [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2131.571337] env[62875]: DEBUG nova.compute.manager [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing instance network info cache due to event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2131.571337] env[62875]: DEBUG oslo_concurrency.lockutils [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] Acquiring lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2131.571337] env[62875]: DEBUG oslo_concurrency.lockutils [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] Acquired lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2131.571337] env[62875]: DEBUG nova.network.neutron [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2131.635900] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5cc2ac15-1799-48ec-9826-0f5a5edb3e89 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.635900] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.070s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.635900] env[62875]: DEBUG nova.objects.instance [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lazy-loading 'resources' on Instance uuid 7f16b893-02e4-4395-b787-f82bc4549e4a {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2131.714687] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180300, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.783105] env[62875]: DEBUG oslo_vmware.api [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180297, 'name': PowerOnVM_Task, 'duration_secs': 0.750762} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.783105] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2131.783105] env[62875]: INFO nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Took 7.69 seconds to spawn the instance on the hypervisor. [ 2131.783105] env[62875]: DEBUG nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2131.783842] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55386e18-8f98-4876-8829-0f6eb59d36d4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.914740] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180301, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.988753] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.988753] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.988753] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.988753] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.988753] env[62875]: DEBUG 
oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.991507] env[62875]: INFO nova.compute.manager [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Terminating instance [ 2132.012109] env[62875]: DEBUG nova.compute.manager [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2132.012337] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2132.013275] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516e67e2-b793-4bc9-b6f3-7c8dff46f1ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.021351] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2132.021587] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47da97b4-a44b-47df-a8bb-e6f49a8bf3f7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.028669] env[62875]: DEBUG oslo_vmware.api [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2132.028669] env[62875]: value = "task-2180302" [ 2132.028669] env[62875]: _type = "Task" [ 2132.028669] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.038813] env[62875]: DEBUG oslo_vmware.api [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.213136] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180300, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.308932] env[62875]: INFO nova.compute.manager [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Took 19.66 seconds to build instance. [ 2132.365696] env[62875]: DEBUG nova.compute.manager [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2132.365898] env[62875]: DEBUG nova.compute.manager [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing instance network info cache due to event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2132.366313] env[62875]: DEBUG oslo_concurrency.lockutils [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] Acquiring lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.409811] env[62875]: DEBUG nova.network.neutron [-] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.414408] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180301, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551411} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.415483] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e63e92-9126-48aa-a16d-a98eef711140 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.418686] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 9dd30ca8-bf15-4a87-b055-3575445f4b79/9dd30ca8-bf15-4a87-b055-3575445f4b79.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2132.419120] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2132.420095] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3896155-4dca-41c0-acee-3019be6102e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.430212] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26f10ef-467b-4cb3-832a-0d85d4c36123 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.434308] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2132.434308] env[62875]: value = "task-2180303" [ 2132.434308] env[62875]: _type = "Task" [ 2132.434308] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.475641] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73bbc4c-9e0e-4693-aefc-d3d0949dbccf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.482716] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180303, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.487411] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20007e66-da43-44d9-a2e2-545365e75632 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.503093] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "refresh_cache-737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.503285] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquired lock "refresh_cache-737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.503503] env[62875]: DEBUG nova.network.neutron [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2132.505982] env[62875]: DEBUG nova.compute.provider_tree [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2132.539274] env[62875]: DEBUG oslo_vmware.api [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180302, 'name': PowerOffVM_Task, 'duration_secs': 0.410641} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.539601] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2132.539777] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2132.540053] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8b8e763-4e16-42c2-9d69-0989919c2ea3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.578749] env[62875]: DEBUG nova.network.neutron [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updated VIF entry in instance network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2132.582564] env[62875]: DEBUG nova.network.neutron [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.687935] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2132.688226] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2132.688414] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Deleting the datastore file [datastore2] d0c4095f-2d78-4055-b568-7e70e7c4c182 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2132.688694] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad76507f-23fd-446d-ba76-fbb7d65facf1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.695664] env[62875]: DEBUG oslo_vmware.api [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for the task: (returnval){ [ 2132.695664] env[62875]: value = "task-2180305" [ 2132.695664] env[62875]: _type = "Task" [ 2132.695664] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.705741] env[62875]: DEBUG oslo_vmware.api [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.715021] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180300, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.811794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0a6b0d7-af2f-4ff5-aacd-d0e4a7d8ebe1 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "346f4371-3029-4710-9163-08cf36196207" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.187s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.919839] env[62875]: INFO nova.compute.manager [-] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Took 1.48 seconds to deallocate network for instance. [ 2132.945682] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073654} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.946096] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2132.946997] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f9ac69-369b-4f91-b413-632948c64e93 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.973348] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 9dd30ca8-bf15-4a87-b055-3575445f4b79/9dd30ca8-bf15-4a87-b055-3575445f4b79.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2132.974320] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d37b13a-dfaa-48a7-a9c8-6661f79ce133 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.994712] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2132.994712] env[62875]: value = 
"task-2180306" [ 2132.994712] env[62875]: _type = "Task" [ 2132.994712] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.004375] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180306, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.008734] env[62875]: DEBUG nova.scheduler.client.report [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2133.032693] env[62875]: DEBUG nova.network.neutron [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2133.082498] env[62875]: DEBUG oslo_concurrency.lockutils [req-9351a0f8-2f38-46a1-9975-a37c967ea501 req-f68ec2d7-3a41-4a96-ab68-110251871275 service nova] Releasing lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.083478] env[62875]: DEBUG oslo_concurrency.lockutils [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] Acquired lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2133.083737] env[62875]: DEBUG nova.network.neutron [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2133.099874] env[62875]: DEBUG nova.network.neutron [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.206040] env[62875]: DEBUG oslo_vmware.api [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Task: {'id': task-2180305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142846} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.209541] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2133.209740] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2133.209924] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2133.210117] env[62875]: INFO nova.compute.manager [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Took 1.20 seconds to destroy the instance on the hypervisor. [ 2133.210358] env[62875]: DEBUG oslo.service.loopingcall [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2133.210569] env[62875]: DEBUG nova.compute.manager [-] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2133.210692] env[62875]: DEBUG nova.network.neutron [-] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2133.217425] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180300, 'name': CloneVM_Task} progress is 95%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.433794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.506265] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180306, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.513493] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.516492] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.234s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.517963] env[62875]: INFO nova.compute.claims [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2133.551895] env[62875]: INFO nova.scheduler.client.report [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Deleted allocations for instance 7f16b893-02e4-4395-b787-f82bc4549e4a [ 2133.602128] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Releasing lock "refresh_cache-737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.605236] env[62875]: DEBUG nova.compute.manager [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2133.605236] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2133.605236] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7124896d-c8e6-4039-95c5-3f4ff349801b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.616102] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2133.616584] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1661dae0-34a4-49af-99c7-a92c1c3d64d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.624180] env[62875]: DEBUG oslo_vmware.api [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2133.624180] env[62875]: value = "task-2180307" [ 2133.624180] env[62875]: _type = "Task" [ 2133.624180] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.637148] env[62875]: DEBUG oslo_vmware.api [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180307, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.718106] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180300, 'name': CloneVM_Task, 'duration_secs': 2.231644} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.718485] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Created linked-clone VM from snapshot [ 2133.719364] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e9fa83-a21d-4470-8c1d-8e23a838e371 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.730286] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Uploading image f1e4bb61-3bf6-4830-a84f-9d4f17bcb1f3 {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2133.764775] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2133.764775] env[62875]: value = "vm-444951" [ 2133.764775] env[62875]: _type = "VirtualMachine" [ 2133.764775] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2133.765191] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7211cea4-eb00-4ea4-9706-90191c5187c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.774316] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lease: (returnval){ [ 2133.774316] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52102465-6735-6a92-f13f-846c71928586" [ 2133.774316] env[62875]: _type = "HttpNfcLease" [ 2133.774316] env[62875]: } obtained for exporting VM: (result){ [ 2133.774316] env[62875]: value = "vm-444951" [ 2133.774316] env[62875]: _type = "VirtualMachine" [ 2133.774316] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2133.774557] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the lease: (returnval){ [ 2133.774557] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52102465-6735-6a92-f13f-846c71928586" [ 2133.774557] env[62875]: _type = "HttpNfcLease" [ 2133.774557] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2133.781279] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2133.781279] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52102465-6735-6a92-f13f-846c71928586" [ 2133.781279] env[62875]: _type = "HttpNfcLease" [ 2133.781279] env[62875]: } is initializing. 
{{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2133.979089] env[62875]: DEBUG nova.network.neutron [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updated VIF entry in instance network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2133.979089] env[62875]: DEBUG nova.network.neutron [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.006152] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180306, 'name': ReconfigVM_Task, 'duration_secs': 0.568258} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.006477] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 9dd30ca8-bf15-4a87-b055-3575445f4b79/9dd30ca8-bf15-4a87-b055-3575445f4b79.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2134.007161] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-440b3ff1-fa1b-4756-ac14-33524c4f32e7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.013700] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2134.013700] env[62875]: value = "task-2180309" [ 2134.013700] env[62875]: _type = "Task" [ 2134.013700] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.024742] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180309, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.063967] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf7b077-3292-45e3-b6d1-7c9ffad8f59d tempest-ServerTagsTestJSON-718510195 tempest-ServerTagsTestJSON-718510195-project-member] Lock "7f16b893-02e4-4395-b787-f82bc4549e4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.171s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.135306] env[62875]: DEBUG oslo_vmware.api [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180307, 'name': PowerOffVM_Task, 'duration_secs': 0.219901} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.135618] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2134.135932] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2134.136250] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac28aa22-44b3-4931-9fc6-e1d7c2b55007 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.163278] env[62875]: DEBUG nova.network.neutron [-] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.169495] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2134.169774] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2134.169966] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleting the datastore file [datastore2] 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d {{(pid=62875) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2134.170228] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ea351cd-cb0d-4697-a547-a9c79be5ef37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.179253] env[62875]: DEBUG oslo_vmware.api [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for the task: (returnval){ [ 2134.179253] env[62875]: value = "task-2180311" [ 2134.179253] env[62875]: _type = "Task" [ 2134.179253] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.191125] env[62875]: DEBUG oslo_vmware.api [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180311, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.286138] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2134.286138] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52102465-6735-6a92-f13f-846c71928586" [ 2134.286138] env[62875]: _type = "HttpNfcLease" [ 2134.286138] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2134.286456] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2134.286456] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52102465-6735-6a92-f13f-846c71928586" [ 2134.286456] env[62875]: _type = "HttpNfcLease" [ 2134.286456] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2134.288870] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212dcd69-5424-4922-a2e5-1d2e8d01eb92 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.296573] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5573d-0fc5-e065-1897-4dc22b36a60f/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2134.296772] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5573d-0fc5-e065-1897-4dc22b36a60f/disk-0.vmdk for reading. 
{{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2134.474972] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-360ecebc-eec1-48b1-b6bf-ab04fdf70a31 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.481701] env[62875]: DEBUG nova.compute.manager [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2134.481701] env[62875]: DEBUG nova.compute.manager [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing instance network info cache due to event network-changed-3e6fd20c-b2c8-44ea-947f-cf7af45bc529. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2134.481701] env[62875]: DEBUG oslo_concurrency.lockutils [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] Acquiring lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.484498] env[62875]: DEBUG oslo_concurrency.lockutils [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] Releasing lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.484756] env[62875]: DEBUG nova.compute.manager [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Received event network-vif-deleted-34058ec7-a269-4392-a5aa-933b163e8602 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2134.485026] env[62875]: INFO nova.compute.manager [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Neutron deleted interface 34058ec7-a269-4392-a5aa-933b163e8602; detaching it from the instance and deleting it from the info cache [ 2134.487634] env[62875]: DEBUG nova.network.neutron [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.487634] env[62875]: DEBUG oslo_concurrency.lockutils [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] Acquired lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.487634] env[62875]: DEBUG nova.network.neutron [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Refreshing network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2134.531881] env[62875]: DEBUG 
oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180309, 'name': Rename_Task, 'duration_secs': 0.187205} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.531881] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2134.532238] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddec4ee9-d339-46a2-8132-36de36f775b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.540098] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2134.540098] env[62875]: value = "task-2180312" [ 2134.540098] env[62875]: _type = "Task" [ 2134.540098] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.548222] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180312, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.654855] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "8f817564-b224-4dcb-bd8c-4d63509a5628" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.655126] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.655336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "8f817564-b224-4dcb-bd8c-4d63509a5628-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.655522] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.655691] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.657732] env[62875]: INFO nova.compute.manager [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Terminating instance [ 2134.666663] env[62875]: INFO nova.compute.manager [-] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Took 1.46 seconds to deallocate network for instance. [ 2134.687347] env[62875]: DEBUG oslo_vmware.api [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Task: {'id': task-2180311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092205} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.687501] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2134.687677] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2134.687870] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2134.688100] env[62875]: INFO nova.compute.manager [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2134.688382] env[62875]: DEBUG oslo.service.loopingcall [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2134.688946] env[62875]: DEBUG nova.compute.manager [-] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2134.689396] env[62875]: DEBUG nova.network.neutron [-] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2134.736664] env[62875]: DEBUG nova.network.neutron [-] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2134.759854] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1890623a-6856-436f-aae2-bca45068c085 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.768598] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189dc192-0c95-473b-be6b-b01b90246746 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.805045] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55ef941-2a44-4c3a-b45f-7c041a59f986 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.814022] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecc5ca2-4cf1-469e-a716-01f949c86cf0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.828141] env[62875]: DEBUG nova.compute.provider_tree [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2134.865240] env[62875]: DEBUG nova.compute.manager [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2134.866228] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96224132-58af-4868-9177-c46661a0f04a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.992190] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bd0e40a-6f4c-4a2e-a20d-edfdb219d619 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.004104] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2550883-6eab-408e-bfe9-ca62c7bef202 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.039630] env[62875]: DEBUG nova.compute.manager [req-a46c6c8e-56e3-40fc-b0a7-2aac9292e57c req-ba745709-4df4-46ee-8201-d030f7b29d0c service nova] [instance: 
380229e2-25ba-47cb-a6ca-167b9d9672eb] Detach interface failed, port_id=34058ec7-a269-4392-a5aa-933b163e8602, reason: Instance 380229e2-25ba-47cb-a6ca-167b9d9672eb could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2135.049145] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180312, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.162678] env[62875]: DEBUG nova.compute.manager [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2135.162678] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2135.162678] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ac1f1c-dc5e-4882-aeb1-95d811361762 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.171786] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2135.173772] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32e98c70-b55d-4369-83b6-17f02e7b7bb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.176506] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.179772] env[62875]: DEBUG oslo_vmware.api [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2135.179772] env[62875]: value = "task-2180313" [ 2135.179772] env[62875]: _type = "Task" [ 2135.179772] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.189699] env[62875]: DEBUG oslo_vmware.api [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180313, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.239787] env[62875]: DEBUG nova.network.neutron [-] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.331511] env[62875]: DEBUG nova.scheduler.client.report [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2135.379746] env[62875]: INFO nova.compute.manager [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] instance snapshotting [ 2135.383611] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58acf51e-59e8-41f6-ab95-48d216146720 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.409737] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924f9c98-18d5-47b3-a632-e4e79a4bb32d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.553774] env[62875]: DEBUG oslo_vmware.api [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180312, 'name': PowerOnVM_Task, 'duration_secs': 0.694358} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.554583] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2135.555320] env[62875]: INFO nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Took 13.88 seconds to spawn the instance on the hypervisor. 
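
The PowerOnVM_Task records above follow the standard oslo.vmware pattern: a *_Task API call returns a Task managed-object reference immediately, and the session's wait_for_task() then polls it (the "_poll_task ... progress is N%" records) until vCenter reports success or failure. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and MoRef value below are illustrative placeholders, not values from this log:

    from oslo_vmware import api, vim_util

    # Placeholder connection details (not taken from this log).
    session = api.VMwareAPISession(
        'vc.example.test',
        'administrator@vsphere.local',
        'secret',
        10,    # api_retry_count
        0.5,   # task_poll_interval: how often wait_for_task polls, in seconds
    )

    # Build a VM managed-object reference from a known value (placeholder).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # The *_Task invocation returns a Task MoRef without blocking ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... and wait_for_task() polls it to completion, raising on error.
    # This polling loop is what emits the "progress is N%" and
    # "completed successfully" DEBUG records seen above.
    session.wait_for_task(task)
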
[ 2135.555625] env[62875]: DEBUG nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2135.556877] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5813f901-e7f4-4f42-8ad5-3ca9104a9936 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.680944] env[62875]: DEBUG nova.network.neutron [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updated VIF entry in instance network info cache for port 3e6fd20c-b2c8-44ea-947f-cf7af45bc529. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2135.681739] env[62875]: DEBUG nova.network.neutron [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [{"id": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "address": "fa:16:3e:b9:e1:fb", "network": {"id": "260943be-4698-4425-ae4b-22d21e036685", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1758636538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82e42c29c6dd480c87096bea1977074d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e6fd20c-b2", "ovs_interfaceid": "3e6fd20c-b2c8-44ea-947f-cf7af45bc529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.695734] env[62875]: DEBUG oslo_vmware.api [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180313, 'name': PowerOffVM_Task, 'duration_secs': 0.311491} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.697178] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2135.697654] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2135.698441] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc7cf99f-d6fa-45e9-85d3-ac45df5b2efa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.743865] env[62875]: INFO nova.compute.manager [-] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Took 1.05 seconds to deallocate network for instance. [ 2135.838493] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.839334] env[62875]: DEBUG nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2135.844915] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.599s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.845285] env[62875]: DEBUG nova.objects.instance [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lazy-loading 'resources' on Instance uuid 44a248f6-443c-4b7c-95f0-088f0cdb924d {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2135.922851] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2135.923230] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-76a3c0a6-fa70-4f19-8cc8-850068332193 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.932088] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2135.932088] env[62875]: value = "task-2180315" [ 2135.932088] env[62875]: _type = "Task" [ 2135.932088] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.945082] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180315, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.987105] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2135.987364] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2135.987541] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Deleting the datastore file [datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2135.987807] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fc4e87c-9ae5-49c2-9fa3-109c42b31954 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.995409] env[62875]: DEBUG oslo_vmware.api [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for the task: (returnval){ [ 2135.995409] env[62875]: value = "task-2180316" [ 2135.995409] env[62875]: _type = "Task" [ 2135.995409] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.005212] env[62875]: DEBUG oslo_vmware.api [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.086871] env[62875]: INFO nova.compute.manager [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Took 27.76 seconds to build instance. 
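The records above trace the vmwareapi destroy path for instance 8f817564: PowerOffVM_Task, then UnregisterVM (no task polling appears for it because it is a synchronous call), then DeleteDatastoreFile_Task against the instance directory "[datastore1] 8f817564-b224-4dcb-bd8c-4d63509a5628". A condensed sketch of that sequence; vim_call() and wait() are hypothetical stand-ins for the session invocations logged here:

    def vim_call(session, method, *args):   # hypothetical dispatch helper
        return session.invoke(method, *args)

    def wait(session, task_ref):             # see the polling sketch above
        return wait_for_task(session, task_ref)

    def destroy_instance(session, vm_ref, datastore, instance_uuid):
        # 1. Power off: a vCenter task, polled to completion.
        wait(session, vim_call(session, 'PowerOffVM_Task', vm_ref))
        # 2. UnregisterVM drops the VM from inventory but leaves its files.
        vim_call(session, 'UnregisterVM', vm_ref)
        # 3. Delete the instance directory, e.g. "[datastore1] 8f817564-...".
        path = f'[{datastore}] {instance_uuid}'
        wait(session, vim_call(session, 'DeleteDatastoreFile_Task', path))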
[ 2136.190263] env[62875]: DEBUG oslo_concurrency.lockutils [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] Releasing lock "refresh_cache-8f817564-b224-4dcb-bd8c-4d63509a5628" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2136.191419] env[62875]: DEBUG nova.compute.manager [req-625b1228-76dd-4963-94d8-d7d9f0be1ce7 req-cd8dc6d0-cf31-41c7-a180-331a2604af72 service nova] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Received event network-vif-deleted-45fd138d-48ba-4dbc-b40b-e424777fed62 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2136.253214] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.348924] env[62875]: DEBUG nova.compute.utils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2136.354871] env[62875]: DEBUG nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2136.354871] env[62875]: DEBUG nova.network.neutron [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2136.426561] env[62875]: DEBUG nova.policy [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e818b6d10af42bb9c86e79ae93de507', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7523e34b88d84ec1ae28221d8d1a3591', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2136.449111] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180315, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.507965] env[62875]: DEBUG oslo_vmware.api [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Task: {'id': task-2180316, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183693} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.508123] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2136.508306] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2136.508480] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2136.508661] env[62875]: INFO nova.compute.manager [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Took 1.35 seconds to destroy the instance on the hypervisor. [ 2136.508910] env[62875]: DEBUG oslo.service.loopingcall [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2136.509113] env[62875]: DEBUG nova.compute.manager [-] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2136.509207] env[62875]: DEBUG nova.network.neutron [-] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2136.589860] env[62875]: DEBUG oslo_concurrency.lockutils [None req-260a0e0d-f04a-43a1-b325-0033f94ef7b3 tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.282s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.625334] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920f61ab-6d52-4812-9463-51e74eef83ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.643497] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78e5e28-2b64-4186-a5ab-3bf394844e14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.679382] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525464e7-aac8-4b28-b2c7-bfb01c9b003b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.688085] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7808104-25a5-4be7-be86-3d6d8a354d82 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.704150] env[62875]: DEBUG nova.compute.provider_tree [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2136.855368] env[62875]: DEBUG nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2136.945310] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180315, 'name': CreateSnapshot_Task, 'duration_secs': 0.767458} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2136.945658] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}}
[ 2136.946368] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ae37c0-2c7a-48c9-adb8-ee3c11cc9417 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2137.211173] env[62875]: DEBUG nova.scheduler.client.report [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2137.319918] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "9dd30ca8-bf15-4a87-b055-3575445f4b79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2137.320920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2137.320920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2137.320920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2137.320920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2137.325981] env[62875]: INFO nova.compute.manager [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Terminating instance
[ 2137.352200] env[62875]: DEBUG nova.compute.manager [req-a71140e3-4c52-4174-98c0-10f427d9dd7f req-7632582e-15ac-4c9f-8070-241500e2bf57 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Received event network-vif-deleted-3e6fd20c-b2c8-44ea-947f-cf7af45bc529 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2137.352492] env[62875]: INFO nova.compute.manager [req-a71140e3-4c52-4174-98c0-10f427d9dd7f req-7632582e-15ac-4c9f-8070-241500e2bf57 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Neutron deleted interface 3e6fd20c-b2c8-44ea-947f-cf7af45bc529; detaching it from the instance and deleting it from the info cache
[ 2137.352672] env[62875]: DEBUG nova.network.neutron [req-a71140e3-4c52-4174-98c0-10f427d9dd7f req-7632582e-15ac-4c9f-8070-241500e2bf57 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2137.457089] env[62875]: DEBUG nova.network.neutron [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Successfully created port: 2c18fe19-36d2-4a2f-8c64-e8268acfc359 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2137.468016] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}}
[ 2137.468429] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-14d99e3b-0f5f-4a6e-afe8-f8e5a3b3818d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2137.478364] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2137.478364] env[62875]: value = "task-2180317"
[ 2137.478364] env[62875]: _type = "Task"
[ 2137.478364] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2137.494240] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180317, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2137.558053] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2137.560776] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2137.720620] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2137.724078] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.290s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2137.724341] env[62875]: DEBUG nova.objects.instance [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lazy-loading 'resources' on Instance uuid 380229e2-25ba-47cb-a6ca-167b9d9672eb {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2137.758852] env[62875]: INFO nova.scheduler.client.report [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted allocations for instance 44a248f6-443c-4b7c-95f0-088f0cdb924d
[ 2137.829217] env[62875]: DEBUG nova.network.neutron [-] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2137.830881] env[62875]: DEBUG nova.compute.manager [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Start destroying the instance on the hypervisor.
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2137.831139] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2137.833589] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af327eda-fe77-4049-b307-7dda920b2432 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.845176] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2137.845176] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66d0c384-57e6-408b-a5e0-3fa50244efff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.851215] env[62875]: DEBUG oslo_vmware.api [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2137.851215] env[62875]: value = "task-2180318" [ 2137.851215] env[62875]: _type = "Task" [ 2137.851215] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.855869] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbc87611-194c-4c9a-a920-932eacb6df9a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.864381] env[62875]: DEBUG oslo_vmware.api [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.868058] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff2cabb-f450-451a-811d-13231cfab2fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.880889] env[62875]: DEBUG nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2137.910975] env[62875]: DEBUG nova.compute.manager [req-a71140e3-4c52-4174-98c0-10f427d9dd7f req-7632582e-15ac-4c9f-8070-241500e2bf57 service nova] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Detach interface failed, port_id=3e6fd20c-b2c8-44ea-947f-cf7af45bc529, reason: Instance 8f817564-b224-4dcb-bd8c-4d63509a5628 could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2137.921212] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2137.921475] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2137.921636] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2137.921813] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2137.921960] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2137.922121] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2137.922339] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2137.922503] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2137.922669] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2137.922836] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2137.923024] env[62875]: DEBUG nova.virt.hardware [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2137.923940] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbab58e7-e0ad-4a58-920d-ab8545631970 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2137.932137] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81af4b9a-d05c-4b34-9f23-b7a86d13d544 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2137.988586] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180317, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2138.067245] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2138.068487] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 2138.268074] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f06331d9-c71f-4779-ac88-3f9c8e63dbe1 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "44a248f6-443c-4b7c-95f0-088f0cdb924d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.789s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2138.340061] env[62875]: INFO nova.compute.manager [-] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Took 1.83 seconds to deallocate network for instance.
[ 2138.367107] env[62875]: DEBUG oslo_vmware.api [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180318, 'name': PowerOffVM_Task, 'duration_secs': 0.419575} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
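The nova.virt.hardware records above walk from unset flavor/image limits (0 means unspecified, so the 65536 defaults apply) to a single candidate topology for one vCPU. A rough reconstruction of the enumeration step, deliberately simplified from what the logged functions actually do:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate sockets*cores*threads factorizations of the vCPU count
        # that respect the (here effectively unbounded) limits.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    # For the m1.nano flavor above (vcpus=1) this yields exactly one
    # topology, matching "Got 1 possible topologies":
    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]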
[ 2138.367568] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2138.367568] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2138.368351] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d7a36c4-103e-4973-b48d-23fb8eb6c11b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2138.440543] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772a8e6f-69d8-4c42-826c-c0e49a8dc67a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2138.449046] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c4aabc-bcc2-4bd9-800a-1da2b2f899b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2138.486369] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2504e610-6c7d-494d-aa2f-9186e281e9c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2139.255306] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2139.260534] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e561216-f74b-4238-9e03-98775103a691 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2139.264398] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180317, 'name': CloneVM_Task} progress is 94%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.264627] env[62875]: WARNING oslo_vmware.common.loopingcall [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] task run outlasted interval by 0.281922 sec [ 2139.264855] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2139.265048] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2139.265228] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Deleting the datastore file [datastore2] 9dd30ca8-bf15-4a87-b055-3575445f4b79 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2139.265807] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b0886ab-1134-4a4b-8dc5-fd250e514676 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.281441] env[62875]: DEBUG nova.compute.provider_tree [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2139.285572] env[62875]: DEBUG oslo_vmware.api [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for the task: (returnval){ [ 2139.285572] env[62875]: value = "task-2180320" [ 2139.285572] env[62875]: _type = "Task" [ 2139.285572] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.289804] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180317, 'name': CloneVM_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.300057] env[62875]: DEBUG oslo_vmware.api [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180320, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2139.307650] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2139.307852] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2139.308078] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 2139.776283] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180317, 'name': CloneVM_Task, 'duration_secs': 1.838336} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2139.776599] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Created linked-clone VM from snapshot
[ 2139.777379] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e202ac83-c6ec-4948-81c4-817ecb9dcc33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2139.785705] env[62875]: DEBUG nova.scheduler.client.report [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2139.793118] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Uploading image c3db3f4b-7101-488a-81eb-25039e53d59c {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}}
[ 2139.808798] env[62875]: DEBUG oslo_vmware.api [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Task: {'id': task-2180320, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
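"Inventory has not changed" records like the one above mean the report client compared the freshly computed inventory against its cached provider-tree copy and skipped the Placement update. A minimal sketch of that short-circuit, using the exact dict shape from the log; update_inventory() and the cache layout here are illustrative, not Nova's real signatures:

    local_inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def update_inventory(provider_tree_cache, provider_uuid, inventory):
        if provider_tree_cache.get(provider_uuid) == inventory:
            # Matches the DEBUG line: no PUT to Placement is issued.
            print(f'Inventory has not changed for provider {provider_uuid}')
            return False
        provider_tree_cache[provider_uuid] = inventory
        return True  # the caller would now push the new inventory to Placement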
[ 2139.810018] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2139.810311] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2139.810556] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2139.811274] env[62875]: INFO nova.compute.manager [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Took 1.98 seconds to destroy the instance on the hypervisor.
[ 2139.811274] env[62875]: DEBUG oslo.service.loopingcall [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2139.814226] env[62875]: DEBUG nova.compute.manager [-] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2139.814226] env[62875]: DEBUG nova.network.neutron [-] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2139.826951] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating HttpNfcLease lease for exporting VM: (result){
[ 2139.826951] env[62875]: value = "vm-444953"
[ 2139.826951] env[62875]: _type = "VirtualMachine"
[ 2139.826951] env[62875]: }.
{{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2139.827148] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6b94af08-ccf7-48a0-936b-b476bf73437f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.831150] env[62875]: DEBUG nova.compute.manager [req-33e5f1f7-1cde-45c4-9759-f79d188714a7 req-7d82373e-f287-4dfc-8b7c-dd49b265a05f service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Received event network-vif-plugged-2c18fe19-36d2-4a2f-8c64-e8268acfc359 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2139.831434] env[62875]: DEBUG oslo_concurrency.lockutils [req-33e5f1f7-1cde-45c4-9759-f79d188714a7 req-7d82373e-f287-4dfc-8b7c-dd49b265a05f service nova] Acquiring lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.831698] env[62875]: DEBUG oslo_concurrency.lockutils [req-33e5f1f7-1cde-45c4-9759-f79d188714a7 req-7d82373e-f287-4dfc-8b7c-dd49b265a05f service nova] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.831967] env[62875]: DEBUG oslo_concurrency.lockutils [req-33e5f1f7-1cde-45c4-9759-f79d188714a7 req-7d82373e-f287-4dfc-8b7c-dd49b265a05f service nova] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.832222] env[62875]: DEBUG nova.compute.manager [req-33e5f1f7-1cde-45c4-9759-f79d188714a7 req-7d82373e-f287-4dfc-8b7c-dd49b265a05f service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] No waiting events found dispatching network-vif-plugged-2c18fe19-36d2-4a2f-8c64-e8268acfc359 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2139.832446] env[62875]: WARNING nova.compute.manager [req-33e5f1f7-1cde-45c4-9759-f79d188714a7 req-7d82373e-f287-4dfc-8b7c-dd49b265a05f service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Received unexpected event network-vif-plugged-2c18fe19-36d2-4a2f-8c64-e8268acfc359 for instance with vm_state building and task_state spawning. [ 2139.838346] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lease: (returnval){ [ 2139.838346] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fdf2c4-a479-e637-c628-0e5d228348e3" [ 2139.838346] env[62875]: _type = "HttpNfcLease" [ 2139.838346] env[62875]: } obtained for exporting VM: (result){ [ 2139.838346] env[62875]: value = "vm-444953" [ 2139.838346] env[62875]: _type = "VirtualMachine" [ 2139.838346] env[62875]: }. 
{{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2139.838770] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the lease: (returnval){ [ 2139.838770] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fdf2c4-a479-e637-c628-0e5d228348e3" [ 2139.838770] env[62875]: _type = "HttpNfcLease" [ 2139.838770] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2139.846252] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2139.846252] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fdf2c4-a479-e637-c628-0e5d228348e3" [ 2139.846252] env[62875]: _type = "HttpNfcLease" [ 2139.846252] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2140.043918] env[62875]: DEBUG nova.network.neutron [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Successfully updated port: 2c18fe19-36d2-4a2f-8c64-e8268acfc359 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2140.301307] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.573s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.303209] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.127s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2140.303663] env[62875]: DEBUG nova.objects.instance [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lazy-loading 'resources' on Instance uuid d0c4095f-2d78-4055-b568-7e70e7c4c182 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2140.337685] env[62875]: INFO nova.scheduler.client.report [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted allocations for instance 380229e2-25ba-47cb-a6ca-167b9d9672eb [ 2140.347184] env[62875]: DEBUG nova.compute.manager [req-4cbf7c6d-3189-426e-bd0f-b7410b1cc9bb req-dc0314db-7ffc-4d41-8839-a76490c1051d service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received event network-vif-deleted-c9763427-3b9c-4a83-a0df-a284b4cf99a0 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2140.347399] env[62875]: INFO nova.compute.manager [req-4cbf7c6d-3189-426e-bd0f-b7410b1cc9bb req-dc0314db-7ffc-4d41-8839-a76490c1051d service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Neutron deleted interface c9763427-3b9c-4a83-a0df-a284b4cf99a0; detaching it from the instance 
and deleting it from the info cache [ 2140.347656] env[62875]: DEBUG nova.network.neutron [req-4cbf7c6d-3189-426e-bd0f-b7410b1cc9bb req-dc0314db-7ffc-4d41-8839-a76490c1051d service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Updating instance_info_cache with network_info: [{"id": "88706d53-4951-4a59-9a6a-324554fac125", "address": "fa:16:3e:c2:6a:cd", "network": {"id": "65ac5479-1301-4af5-8c4d-e8271afb5440", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1628466981", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "63be470870764b6ab6e803cc2a345f24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88706d53-49", "ovs_interfaceid": "88706d53-4951-4a59-9a6a-324554fac125", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2140.354224] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2140.354224] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fdf2c4-a479-e637-c628-0e5d228348e3" [ 2140.354224] env[62875]: _type = "HttpNfcLease" [ 2140.354224] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2140.354224] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2140.354224] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fdf2c4-a479-e637-c628-0e5d228348e3" [ 2140.354224] env[62875]: _type = "HttpNfcLease" [ 2140.354224] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2140.355133] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cadb0c9-840b-42fb-94ce-4b20612b0cb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.364336] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f60fd-e624-e7ea-e7ab-3786946a3d47/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2140.364582] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f60fd-e624-e7ea-e7ab-3786946a3d47/disk-0.vmdk for reading. 
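[editor's note] The records above trace the HttpNfcLease export lifecycle: ExportVm returns a lease, the lease is polled out of "initializing" until "ready", and the disk's VMDK URL is then read from the lease info. A minimal sketch of that shape follows; it is not Nova source, and `session.call`/`session.get_property` are hypothetical stand-ins for an authenticated oslo.vmware API session (the real code is in oslo_vmware/rw_handles.py).

    import time

    def export_vm_disk_url(session, vm_ref, poll_interval=0.5, timeout=60.0):
        lease = session.call("ExportVm", vm_ref)          # hypothetical helper
        deadline = time.monotonic() + timeout
        while True:                                       # mirrors _poll_lease
            state = session.get_property(lease, "state")  # hypothetical helper
            if state == "ready":
                break
            if state == "error":
                raise RuntimeError("export lease entered error state")
            if time.monotonic() > deadline:
                raise TimeoutError("lease stuck in 'initializing'")
            time.sleep(poll_interval)
        info = session.get_property(lease, "info")
        # info.deviceUrl carries one entry per exported device; keep disks.
        return next(u.url for u in info.deviceUrl if u.disk)

[end note]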
{{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2140.473264] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2f19cc06-97ab-4f61-9a12-94a3f6c63e18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.548118] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2140.548118] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2140.548118] env[62875]: DEBUG nova.network.neutron [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2140.652080] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updating instance_info_cache with network_info: [{"id": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "address": "fa:16:3e:e9:29:5c", "network": {"id": "3028cdbc-4b41-4102-bd32-59fd93a60bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-125987334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9e631a043ef4ed9ae37c18a142afa38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87f73d-31", "ovs_interfaceid": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2140.823533] env[62875]: DEBUG nova.network.neutron [-] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2140.846816] env[62875]: DEBUG oslo_concurrency.lockutils [None req-76010cad-7653-439d-9e1e-198803f665b7 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock 
"380229e2-25ba-47cb-a6ca-167b9d9672eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.084s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.849925] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7f10469-bfba-4932-adb8-abba8f64122a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.863493] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9de04e1-3088-4aee-b57a-bc550cbcbe11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.900911] env[62875]: DEBUG nova.compute.manager [req-4cbf7c6d-3189-426e-bd0f-b7410b1cc9bb req-dc0314db-7ffc-4d41-8839-a76490c1051d service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Detach interface failed, port_id=c9763427-3b9c-4a83-a0df-a284b4cf99a0, reason: Instance 9dd30ca8-bf15-4a87-b055-3575445f4b79 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2141.054067] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df382783-1185-43c3-b00c-c2ff1ad4edd8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.063080] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a5fc95-6322-402b-8b59-accf933775c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.098123] env[62875]: DEBUG nova.network.neutron [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2141.101244] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb56777-95ee-404f-88d1-b0b5e22f89f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.112944] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697246ba-e35e-43bf-b51a-da3d6c182f60 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.127094] env[62875]: DEBUG nova.compute.provider_tree [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2141.152956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2141.153472] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2141.155746] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.155746] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.155746] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.155746] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.155746] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.155746] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.155746] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... 
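[editor's note] The run of "Running periodic task ComputeManager._poll_*" records above is the oslo.service periodic-task machinery firing each decorated method in turn. A minimal sketch of that pattern, with an illustrative task name and spacing (not Nova's actual configuration):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_rebooting_instances(self, context):
            # Invoked by run_periodic_tasks() once its spacing has elapsed;
            # an exception here is logged, not fatal to the service.
            pass

    # The service loop drives manager.run_periodic_tasks(context) on a timer,
    # producing exactly the kind of DEBUG records seen above.

[end note]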
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2141.155746] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.327892] env[62875]: INFO nova.compute.manager [-] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Took 1.51 seconds to deallocate network for instance. [ 2141.369456] env[62875]: DEBUG nova.network.neutron [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance_info_cache with network_info: [{"id": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "address": "fa:16:3e:de:04:4b", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c18fe19-36", "ovs_interfaceid": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2141.633468] env[62875]: DEBUG nova.scheduler.client.report [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2141.658514] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.835876] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.871900] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2141.872341] env[62875]: DEBUG nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Instance network_info: |[{"id": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "address": "fa:16:3e:de:04:4b", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c18fe19-36", "ovs_interfaceid": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2141.872869] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:04:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c18fe19-36d2-4a2f-8c64-e8268acfc359', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2141.883651] env[62875]: DEBUG oslo.service.loopingcall [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2141.883651] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2141.884139] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3342a4d3-a819-4907-af89-b44efbec1ec0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.909280] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2141.909280] env[62875]: value = "task-2180322" [ 2141.909280] env[62875]: _type = "Task" [ 2141.909280] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.918431] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180322, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.139762] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.836s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.145113] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.890s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.145113] env[62875]: DEBUG nova.objects.instance [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lazy-loading 'resources' on Instance uuid 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2142.165705] env[62875]: INFO nova.scheduler.client.report [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Deleted allocations for instance d0c4095f-2d78-4055-b568-7e70e7c4c182 [ 2142.421804] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180322, 'name': CreateVM_Task, 'duration_secs': 0.415482} completed successfully. 
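[editor's note] The CreateVM_Task records above show the wait_for_task/_poll_task pattern: a vCenter Task moref is polled, logging "progress is N%" until the task reports success or error. A hedged sketch of that loop; `get_task_info` is a hypothetical accessor for the Task's `info` property:

    import time

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error_msg)
            # 'queued'/'running': keep polling, as the progress records show.
            time.sleep(interval)

[end note]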
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.422167] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2142.422758] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2142.422943] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2142.423332] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2142.423809] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e39b1e0-c4cb-4cb4-93e2-96d90f1c11db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.428732] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2142.428732] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d82240-737e-e019-69a8-fc4c88c41fc0" [ 2142.428732] env[62875]: _type = "Task" [ 2142.428732] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.437715] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d82240-737e-e019-69a8-fc4c88c41fc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.673834] env[62875]: DEBUG oslo_concurrency.lockutils [None req-61a1da6e-55f7-44ab-8a4c-bcfa40ff6e82 tempest-SecurityGroupsTestJSON-1097908726 tempest-SecurityGroupsTestJSON-1097908726-project-member] Lock "d0c4095f-2d78-4055-b568-7e70e7c4c182" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.170s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.807198] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5573d-0fc5-e065-1897-4dc22b36a60f/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2142.809074] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387fd3ed-2cd8-4981-ace6-8e1d1b935d47 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.815487] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5573d-0fc5-e065-1897-4dc22b36a60f/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2142.815669] env[62875]: ERROR oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5573d-0fc5-e065-1897-4dc22b36a60f/disk-0.vmdk due to incomplete transfer. [ 2142.815926] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5871477a-b2b0-4619-a730-932cfd39f1fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.823957] env[62875]: DEBUG oslo_vmware.rw_handles [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5573d-0fc5-e065-1897-4dc22b36a60f/disk-0.vmdk. 
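[editor's note] The one ERROR in this span ("Aborting lease ... due to incomplete transfer") is the close path of a VMDK read handle: if the handle is closed while the lease is still "ready" but fewer bytes were read than the disk holds, the lease is aborted instead of completed so vCenter can tear down the NFC session. A sketch of that decision, with hypothetical `session` helpers (HttpNfcLeaseAbort and HttpNfcLeaseComplete are the real vSphere API calls):

    def release_read_lease(session, lease, bytes_read, total_bytes):
        state = session.get_property(lease, "state")
        if state != "ready":
            return  # already completed or errored; nothing to release
        if bytes_read < total_bytes:
            # Incomplete transfer: abort, which is what the ERROR record marks.
            session.call("HttpNfcLeaseAbort", lease)
        else:
            session.call("HttpNfcLeaseComplete", lease)

[end note]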
{{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2142.824177] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Uploaded image f1e4bb61-3bf6-4830-a84f-9d4f17bcb1f3 to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2142.826836] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2142.827461] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-81acf081-5c7f-418b-8ae8-6fa6434b66d0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.834678] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2142.834678] env[62875]: value = "task-2180323" [ 2142.834678] env[62875]: _type = "Task" [ 2142.834678] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.843315] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180323, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.902583] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc776e3-4538-4793-b714-1690cf2ed809 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.910261] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448ac591-3e13-46bd-8e52-db5e6f05e8e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.952208] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f560ae1-f521-401d-8b29-ddd2729fdc0d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.955969] env[62875]: DEBUG nova.compute.manager [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Received event network-changed-2c18fe19-36d2-4a2f-8c64-e8268acfc359 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2142.956192] env[62875]: DEBUG nova.compute.manager [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Refreshing instance network info cache due to event network-changed-2c18fe19-36d2-4a2f-8c64-e8268acfc359. 
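[editor's note] The network-changed event above triggers a cache refresh guarded by a per-instance lock named "refresh_cache-<uuid>", which is why the following records show Acquiring/Acquired/Releasing on that lock name. A sketch of the pattern; oslo_concurrency.lockutils.lock is the real primitive, while `refresh_network_cache` is a hypothetical callback:

    from oslo_concurrency import lockutils

    def handle_network_changed(instance_uuid, port_id, refresh_network_cache):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            # Holding the lock keeps concurrent event handlers and the
            # _heal_instance_info_cache periodic task from racing on the cache.
            refresh_network_cache(instance_uuid, port_id)

[end note]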
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2142.956429] env[62875]: DEBUG oslo_concurrency.lockutils [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] Acquiring lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2142.956583] env[62875]: DEBUG oslo_concurrency.lockutils [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] Acquired lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2142.956762] env[62875]: DEBUG nova.network.neutron [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Refreshing network info cache for port 2c18fe19-36d2-4a2f-8c64-e8268acfc359 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2142.966749] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d82240-737e-e019-69a8-fc4c88c41fc0, 'name': SearchDatastore_Task, 'duration_secs': 0.01161} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.969715] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2142.969989] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2142.970267] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2142.970432] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2142.970628] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2142.971425] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb36d0fa-a9e6-4d31-a2ef-b9eea9f188f4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.974549] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72889aa4-8808-4fe7-8a09-fcaf924a999b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.994179] env[62875]: DEBUG nova.compute.provider_tree [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2142.997660] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2142.997860] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2142.998852] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05504b46-58ac-4c37-871b-980476463b0a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.005311] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2143.005311] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ec83d6-1f81-24be-7079-db7af5303310" [ 2143.005311] env[62875]: _type = "Task" [ 2143.005311] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.014060] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ec83d6-1f81-24be-7079-db7af5303310, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.154919] env[62875]: DEBUG nova.compute.manager [req-6620dbb2-8c67-489d-aafc-c4ee7490f0f1 req-cd546416-0e43-42e0-8f74-ef29e8e53d82 service nova] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Received event network-vif-deleted-88706d53-4951-4a59-9a6a-324554fac125 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2143.345041] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180323, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.497811] env[62875]: DEBUG nova.scheduler.client.report [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2143.516114] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ec83d6-1f81-24be-7079-db7af5303310, 'name': SearchDatastore_Task, 'duration_secs': 0.009778} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.517576] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d52008a-b7f1-4178-8d53-5ea652afa939 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.524568] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2143.524568] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4f6e-80d5-eecf-c041-00575ebc180e" [ 2143.524568] env[62875]: _type = "Task" [ 2143.524568] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.535245] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4f6e-80d5-eecf-c041-00575ebc180e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.670500] env[62875]: DEBUG nova.network.neutron [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updated VIF entry in instance network info cache for port 2c18fe19-36d2-4a2f-8c64-e8268acfc359. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2143.670985] env[62875]: DEBUG nova.network.neutron [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance_info_cache with network_info: [{"id": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "address": "fa:16:3e:de:04:4b", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c18fe19-36", "ovs_interfaceid": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.761100] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.761348] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.846033] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180323, 'name': Destroy_Task, 'duration_secs': 0.608501} completed successfully. 
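[editor's note] The instance_info_cache entries logged above are JSON-serializable VIF models. A small example of pulling the fixed IPv4 addresses out of one, assuming `network_info` is the deserialized list shown in the records:

    def fixed_ips(network_info):
        ips = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip["type"] == "fixed":
                        ips.append(ip["address"])
        return ips

    # For the VIF 2c18fe19-36d2-4a2f-8c64-e8268acfc359 above this yields
    # ["192.168.128.14"].

[end note]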
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.846174] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Destroyed the VM [ 2143.846371] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2143.846636] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dcd1466c-aca3-4d30-8ead-4abef43de2cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.853777] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2143.853777] env[62875]: value = "task-2180324" [ 2143.853777] env[62875]: _type = "Task" [ 2143.853777] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.863036] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180324, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.003262] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.005914] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.751s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.006141] env[62875]: DEBUG nova.objects.instance [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lazy-loading 'resources' on Instance uuid 8f817564-b224-4dcb-bd8c-4d63509a5628 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2144.030055] env[62875]: INFO nova.scheduler.client.report [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Deleted allocations for instance 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d [ 2144.039053] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] 
Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4f6e-80d5-eecf-c041-00575ebc180e, 'name': SearchDatastore_Task, 'duration_secs': 0.010343} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.039376] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2144.039634] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9/85f7c7dc-03c4-44ff-8502-cf61ee7c3af9.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2144.039922] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-602e9c8e-ba7a-4b58-9560-9b037c7b12f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.049144] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2144.049144] env[62875]: value = "task-2180325" [ 2144.049144] env[62875]: _type = "Task" [ 2144.049144] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.058037] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.174166] env[62875]: DEBUG oslo_concurrency.lockutils [req-7e13addb-8b8b-441d-8d39-5c862e68dbbb req-81b71655-d173-48c8-948c-3c1f80aa29b6 service nova] Releasing lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2144.263831] env[62875]: DEBUG nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2144.367891] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180324, 'name': RemoveSnapshot_Task} progress is 100%. 
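[editor's note] The SearchDatastore_Task / CopyVirtualDisk_Task sequence above is the image-cache fast path: the base image VMDK is looked up under devstack-image-cache_base and, on a hit, copied into the instance directory rather than re-fetched from Glance. A sketch under those assumptions; the helpers on `session` are hypothetical:

    def provision_root_disk(session, image_id, instance_uuid,
                            datastore="datastore2"):
        cache_path = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)
        dest_path = "[%s] %s/%s.vmdk" % (datastore, instance_uuid,
                                         instance_uuid)
        if not session.datastore_search(cache_path):   # SearchDatastore_Task
            raise LookupError("cache miss: fetch from Glance first")
        task = session.call("CopyVirtualDisk_Task", cache_path, dest_path)
        session.wait_for_task(task)                    # CopyVirtualDisk_Task
        return dest_path

[end note]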
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.543019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c478e9e-2e2c-4275-ab43-e4f5564a9fc8 tempest-ServerShowV247Test-611317025 tempest-ServerShowV247Test-611317025-project-member] Lock "737c68b0-9ccf-4e0b-a46d-aa78f7981c3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.553s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.560152] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468412} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.562808] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9/85f7c7dc-03c4-44ff-8502-cf61ee7c3af9.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2144.563075] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2144.563837] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3332f052-c182-4c18-865a-8cbd780276dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.573410] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2144.573410] env[62875]: value = "task-2180326" [ 2144.573410] env[62875]: _type = "Task" [ 2144.573410] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.580257] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180326, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.678568] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709821ca-23ee-47a5-bb37-09cec04dfd82 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.687176] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904e8444-8b82-4169-8e50-b6e6f7dceeff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.720777] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f24a079-19fe-4598-b6a9-7803edcd3cdc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.729864] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de0cedc-0e5f-4b33-9f73-441ba461170d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.747220] env[62875]: DEBUG nova.compute.provider_tree [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2144.792041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.867550] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180324, 'name': RemoveSnapshot_Task, 'duration_secs': 0.606689} completed successfully. 
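[editor's note] The recurring "Inventory has not changed for provider ... based on inventory data" records mean the locally computed inventory matched what placement already holds, so no PUT is issued. A trivial sketch of that comparison over the dict shape shown above (resource classes mapping to total/reserved/min_unit/max_unit/step_size/allocation_ratio):

    def inventory_diff(current, proposed):
        changed = {}
        for rc in set(current) | set(proposed):
            if current.get(rc) != proposed.get(rc):
                changed[rc] = (current.get(rc), proposed.get(rc))
        return changed  # empty dict -> "Inventory has not changed"

[end note]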
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.867905] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2144.868806] env[62875]: DEBUG nova.compute.manager [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2144.870024] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbb87b2-a81b-4141-a979-2caac1a62954 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.080714] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072186} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.081070] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2145.081956] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbefc21e-0973-4836-aa22-fdc74571b6f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.105136] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9/85f7c7dc-03c4-44ff-8502-cf61ee7c3af9.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2145.105470] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da4e1f65-32f3-46ec-a5a1-7662c7cd1bea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.126607] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2145.126607] env[62875]: value = "task-2180327" [ 2145.126607] env[62875]: _type = "Task" [ 2145.126607] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.135291] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180327, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.250267] env[62875]: DEBUG nova.scheduler.client.report [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2145.386032] env[62875]: INFO nova.compute.manager [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Shelve offloading [ 2145.637692] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180327, 'name': ReconfigVM_Task, 'duration_secs': 0.290372} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.638158] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9/85f7c7dc-03c4-44ff-8502-cf61ee7c3af9.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2145.638918] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c660c91-b6ee-416c-a9de-f20732309370 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.646546] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2145.646546] env[62875]: value = "task-2180328" [ 2145.646546] env[62875]: _type = "Task" [ 2145.646546] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.655350] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180328, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.756375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.758720] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.100s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.758923] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.759306] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2145.759643] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.924s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.760094] env[62875]: DEBUG nova.objects.instance [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lazy-loading 'resources' on Instance uuid 9dd30ca8-bf15-4a87-b055-3575445f4b79 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2145.761849] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2327cd33-634f-41f5-80f6-01784681406d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.772087] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c856dc-5726-4d2c-b68a-36a11011da05 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.793918] env[62875]: INFO nova.scheduler.client.report [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Deleted allocations for instance 8f817564-b224-4dcb-bd8c-4d63509a5628 [ 2145.796033] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe640969-866f-4406-87a0-04ed1f8dcbf7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.807661] env[62875]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1569c5-9f85-40b2-9e24-77dde7246f74 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.840271] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180156MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2145.840440] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.894369] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2145.894369] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b296648-36d1-43d8-b492-840f9c028843 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.901626] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2145.901626] env[62875]: value = "task-2180329" [ 2145.901626] env[62875]: _type = "Task" [ 2145.901626] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.912157] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] VM already powered off {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2145.912420] env[62875]: DEBUG nova.compute.manager [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2145.913289] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6471bf1-3136-469e-a39e-e8f34bbf193d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.919839] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.920037] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.920220] env[62875]: DEBUG nova.network.neutron [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2146.159193] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180328, 'name': Rename_Task, 'duration_secs': 0.152503} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.159490] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2146.159736] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-790045bf-4c9c-4771-a206-bf62d7813896 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.165858] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2146.165858] env[62875]: value = "task-2180330" [ 2146.165858] env[62875]: _type = "Task" [ 2146.165858] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.173788] env[62875]: DEBUG oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.305559] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b35c9b6c-a5d8-43ef-9579-7a496367a6ec tempest-ServerRescueTestJSONUnderV235-1667398956 tempest-ServerRescueTestJSONUnderV235-1667398956-project-member] Lock "8f817564-b224-4dcb-bd8c-4d63509a5628" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.650s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.418985] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7191dd63-80bf-47fc-b3c5-7549c014ae0b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.427761] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89950cd-a74f-4898-b8e7-d3aadc27fec4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.461369] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6347c68b-2ddb-42cf-b6b1-212d0ec11bbc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.470084] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8d46f4-589d-47a0-bcb4-9e6915696778 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.485326] env[62875]: DEBUG nova.compute.provider_tree [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2146.679656] env[62875]: DEBUG 
oslo_vmware.api [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180330, 'name': PowerOnVM_Task, 'duration_secs': 0.478362} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.679931] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2146.680381] env[62875]: INFO nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Took 8.80 seconds to spawn the instance on the hypervisor. [ 2146.680617] env[62875]: DEBUG nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2146.681722] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa3c70c-fe57-46e0-833c-23132570c8a4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.836107] env[62875]: DEBUG nova.network.neutron [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Updating instance_info_cache with network_info: [{"id": "dcbee196-268d-4405-a144-5c6eca186b1d", "address": "fa:16:3e:ef:8b:ce", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbee196-26", "ovs_interfaceid": "dcbee196-268d-4405-a144-5c6eca186b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.856658] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.856977] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.991823] env[62875]: DEBUG nova.scheduler.client.report [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2147.208182] env[62875]: INFO nova.compute.manager [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Took 18.96 seconds to build instance. [ 2147.312047] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f60fd-e624-e7ea-e7ab-3786946a3d47/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2147.313191] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa72824-48c0-4757-b4e2-c895209e9746 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.320011] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f60fd-e624-e7ea-e7ab-3786946a3d47/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2147.320336] env[62875]: ERROR oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f60fd-e624-e7ea-e7ab-3786946a3d47/disk-0.vmdk due to incomplete transfer. 
[ 2147.321558] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f55a2fc8-8e5f-453e-8ba1-f232cc8b33bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.328031] env[62875]: DEBUG oslo_vmware.rw_handles [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f60fd-e624-e7ea-e7ab-3786946a3d47/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2147.328269] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Uploaded image c3db3f4b-7101-488a-81eb-25039e53d59c to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2147.332599] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2147.332599] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-457dd0da-b5c5-4bff-bf78-cb0bdfb031ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.336706] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2147.336706] env[62875]: value = "task-2180331" [ 2147.336706] env[62875]: _type = "Task" [ 2147.336706] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.342170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2147.346540] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.360654] env[62875]: DEBUG nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2147.456065] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "2dd748c2-048d-4450-a393-995249a9deb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.456305] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.501045] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.501045] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.709s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.502551] env[62875]: INFO nova.compute.claims [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2147.520030] env[62875]: INFO nova.scheduler.client.report [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Deleted allocations for instance 9dd30ca8-bf15-4a87-b055-3575445f4b79 [ 2147.712542] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8662b277-90e4-4264-994a-368a5ae1b64e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.483s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.824565] env[62875]: DEBUG nova.compute.manager [req-8a39b788-1c78-4d0d-b8ec-77cbd36dd682 req-fea75ba7-c862-4c57-bf04-d72e080af2a9 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Received event network-vif-unplugged-dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2147.824565] env[62875]: DEBUG oslo_concurrency.lockutils [req-8a39b788-1c78-4d0d-b8ec-77cbd36dd682 req-fea75ba7-c862-4c57-bf04-d72e080af2a9 service nova] Acquiring lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.824565] env[62875]: DEBUG oslo_concurrency.lockutils [req-8a39b788-1c78-4d0d-b8ec-77cbd36dd682 req-fea75ba7-c862-4c57-bf04-d72e080af2a9 service nova] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.824565] env[62875]: DEBUG oslo_concurrency.lockutils [req-8a39b788-1c78-4d0d-b8ec-77cbd36dd682 req-fea75ba7-c862-4c57-bf04-d72e080af2a9 service nova] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.824565] env[62875]: DEBUG nova.compute.manager [req-8a39b788-1c78-4d0d-b8ec-77cbd36dd682 req-fea75ba7-c862-4c57-bf04-d72e080af2a9 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] No waiting events found dispatching network-vif-unplugged-dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2147.824773] env[62875]: WARNING nova.compute.manager [req-8a39b788-1c78-4d0d-b8ec-77cbd36dd682 req-fea75ba7-c862-4c57-bf04-d72e080af2a9 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Received unexpected event network-vif-unplugged-dcbee196-268d-4405-a144-5c6eca186b1d for instance with vm_state shelved and task_state shelving_offloading. [ 2147.848927] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.854654] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.854899] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.893417] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.894842] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2147.895693] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b307171-eeef-47a3-9bbb-caefcdb87c40 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.904673] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2147.904933] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c057fb97-46e1-46ba-ada1-09ae1aee249a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.958950] env[62875]: DEBUG nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2148.034631] env[62875]: DEBUG oslo_concurrency.lockutils [None req-49a6120b-a7f3-4db9-a359-8c179ac8083e tempest-ServersTestMultiNic-1319225464 tempest-ServersTestMultiNic-1319225464-project-member] Lock "9dd30ca8-bf15-4a87-b055-3575445f4b79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.714s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.182320] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "85be399c-2482-4a19-b68f-b45aa4e6846b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2148.182629] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "85be399c-2482-4a19-b68f-b45aa4e6846b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.348021] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.360789] env[62875]: DEBUG nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2148.495320] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2148.685732] env[62875]: DEBUG nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2148.750163] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb8823c-c24d-484a-8841-a124a7f9ad6d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.758214] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e6b8a0-0ff7-4eca-bb4c-b81cff0b3800 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.794834] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd9a061-4a86-4abb-8007-3c61dd567e64 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.803446] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93dbe1d9-e21b-4ea7-98e7-366c62f0fa9c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.818093] env[62875]: DEBUG nova.compute.provider_tree [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2148.853924] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.887902] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.208678] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.321686] env[62875]: DEBUG nova.scheduler.client.report [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2149.333969] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "5224c475-8739-4137-82e7-c9d149d41d61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.334236] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "5224c475-8739-4137-82e7-c9d149d41d61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.350335] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.827531] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.828175] env[62875]: DEBUG nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2149.831663] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.991s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.836704] env[62875]: DEBUG nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2149.849348] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.859217] env[62875]: DEBUG nova.compute.manager [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Received event network-changed-dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2149.859384] env[62875]: DEBUG nova.compute.manager [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Refreshing instance network info cache due to event network-changed-dcbee196-268d-4405-a144-5c6eca186b1d. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2149.859598] env[62875]: DEBUG oslo_concurrency.lockutils [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] Acquiring lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.859739] env[62875]: DEBUG oslo_concurrency.lockutils [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] Acquired lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.859896] env[62875]: DEBUG nova.network.neutron [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Refreshing network info cache for port dcbee196-268d-4405-a144-5c6eca186b1d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2149.929460] env[62875]: DEBUG nova.compute.manager [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Stashing vm_state: active {{(pid=62875) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2150.334767] env[62875]: DEBUG nova.compute.utils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2150.336176] env[62875]: DEBUG nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2150.336347] env[62875]: DEBUG nova.network.neutron [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2150.363429] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.372247] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.399179] env[62875]: DEBUG nova.policy [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d0e175791341aea0db00ef8a1b5680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '226340868e7446cca12688a32d13c630', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2150.451146] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.608264] env[62875]: DEBUG nova.network.neutron [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Updated VIF entry in instance network info cache for port dcbee196-268d-4405-a144-5c6eca186b1d. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2150.608628] env[62875]: DEBUG nova.network.neutron [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Updating instance_info_cache with network_info: [{"id": "dcbee196-268d-4405-a144-5c6eca186b1d", "address": "fa:16:3e:ef:8b:ce", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": null, "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapdcbee196-26", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2150.814472] env[62875]: DEBUG nova.network.neutron [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Successfully created port: b79962c5-9f95-4d9b-ae67-11445b571d91 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2150.840253] env[62875]: DEBUG nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2150.846415] env[62875]: INFO nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating resource usage from migration 9265753b-3d23-4aaf-b8fb-0f4a7f38de9d [ 2150.858559] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.872866] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2150.873126] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9e0aaea6-96cf-494d-9f70-a709a47f9772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2150.873345] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2150.873553] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 70547fbd-7ce8-466e-8abc-b490b8dd6b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2150.873760] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 45403db3-ff20-42d3-8a37-8db671d8c1fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2150.874090] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7c081de0-1952-4ca8-8f6f-80102e20bff0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2150.874222] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 346f4371-3029-4710-9163-08cf36196207 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2150.874991] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d40aaba6-020d-45b9-83e7-8d7fe382b20f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2151.111484] env[62875]: DEBUG oslo_concurrency.lockutils [req-fb187f01-69a1-458a-bd0b-e69fedc3aa59 req-0efd6be7-b10a-4973-9738-6c55cf231df7 service nova] Releasing lock "refresh_cache-7c081de0-1952-4ca8-8f6f-80102e20bff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2151.358292] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2151.378208] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2151.851426] env[62875]: DEBUG nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2151.862309] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2151.877095] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2151.877095] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2151.877095] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2151.877095] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2151.877095] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2151.877380] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2151.877380] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2151.877523] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2151.877688] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2151.877849] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2151.878065] env[62875]: DEBUG nova.virt.hardware [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2151.878911] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7030edc-a3e3-4fbe-88bf-5b8ceb7f3e16 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2151.881753] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2dd748c2-048d-4450-a393-995249a9deb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2151.888053] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f8637f-e30a-46cc-b223-da958bd9ec6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2152.363191] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2152.384589] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 79afdeda-8a95-4ad4-ba10-0424cedf1d6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2152.863178] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2152.887123] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 85be399c-2482-4a19-b68f-b45aa4e6846b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2153.364382] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2153.390326] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 5224c475-8739-4137-82e7-c9d149d41d61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2153.390491] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Migration 9265753b-3d23-4aaf-b8fb-0f4a7f38de9d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}}
[ 2153.390620] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2153.390867] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2153.391069] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2153.562413] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c24367-f4f3-4377-be05-44002f1dab3e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2153.569328] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dac08dc-9c48-462a-ac38-f64f510a4aef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2153.599395] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b799d683-f7d4-4c26-9cda-3c76f4eb29e9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2153.606426] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a860296a-617f-43f1-aa1f-464458b98895 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2153.619082] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2153.865336] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2154.121778] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2154.366900] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2154.627148] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2154.627148] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.795s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2154.627295] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.734s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2154.628816] env[62875]: INFO nova.compute.claims [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2154.866525] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2155.367600] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2155.815365] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab74a47-0b77-4fa5-b7b8-67c0420a392a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2155.822878] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168a3b12-3e49-4a55-8fa0-98200f91a904 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2155.853206] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9b08c5-2ddd-48be-96e1-8aa66d21f28e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2155.863069] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405f3c07-6e87-4c06-983f-b8cf106e1ec3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2155.871301] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2155.878916] env[62875]: DEBUG nova.compute.provider_tree [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2156.369627] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2156.381647] env[62875]: DEBUG nova.scheduler.client.report [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2156.870588] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2156.886549] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.259s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2156.887072] env[62875]: DEBUG nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2156.889697] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.396s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2156.891101] env[62875]: INFO nova.compute.claims [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2157.185785] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2157.186035] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2157.186245] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleting the datastore file [datastore2] 7c081de0-1952-4ca8-8f6f-80102e20bff0 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2157.186506] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0dec2772-3f00-49bd-9d78-986ba3e5396b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2157.192728] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){
[ 2157.192728] env[62875]: value = "task-2180333"
[ 2157.192728] env[62875]: _type = "Task"
[ 2157.192728] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2157.200527] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180333, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2157.370346] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180331, 'name': Destroy_Task, 'duration_secs': 9.915882} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2157.370629] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Destroyed the VM
[ 2157.370869] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}}
[ 2157.371172] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-895663ed-7ca0-48dd-a6f9-6f1607ec264d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2157.378130] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2157.378130] env[62875]: value = "task-2180334"
[ 2157.378130] env[62875]: _type = "Task"
[ 2157.378130] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2157.385445] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180334, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2157.395724] env[62875]: DEBUG nova.compute.utils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2157.399845] env[62875]: DEBUG nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Not allocating networking since 'none' was specified. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}}
[ 2157.703079] env[62875]: DEBUG oslo_vmware.api [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17563} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2157.703326] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2157.703531] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2157.703742] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2157.721592] env[62875]: INFO nova.scheduler.client.report [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted allocations for instance 7c081de0-1952-4ca8-8f6f-80102e20bff0
[ 2157.887872] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180334, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2157.898382] env[62875]: DEBUG nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2158.077694] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993a447b-dff3-4aaa-b7e3-d6d6bcf3f0ee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.085024] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d500ba94-4d59-41ac-bac9-0b1704b43ceb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.114289] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f91da2-31d5-4043-ad05-846c89fd3f82 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.122188] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78aea02e-72de-41b5-bec9-66cd3835885a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.136545] env[62875]: DEBUG nova.compute.provider_tree [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2158.226678] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2158.247398] env[62875]: DEBUG nova.compute.manager [req-deac706f-c54d-4098-b665-da1c239abfce req-96c2b85a-9b7c-4915-9fa1-0c6de39476bc service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Received event network-vif-plugged-b79962c5-9f95-4d9b-ae67-11445b571d91 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2158.247653] env[62875]: DEBUG oslo_concurrency.lockutils [req-deac706f-c54d-4098-b665-da1c239abfce req-96c2b85a-9b7c-4915-9fa1-0c6de39476bc service nova] Acquiring lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2158.247809] env[62875]: DEBUG oslo_concurrency.lockutils [req-deac706f-c54d-4098-b665-da1c239abfce req-96c2b85a-9b7c-4915-9fa1-0c6de39476bc service nova] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2158.248376] env[62875]: DEBUG oslo_concurrency.lockutils [req-deac706f-c54d-4098-b665-da1c239abfce req-96c2b85a-9b7c-4915-9fa1-0c6de39476bc service nova] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2158.248376] env[62875]: DEBUG nova.compute.manager [req-deac706f-c54d-4098-b665-da1c239abfce req-96c2b85a-9b7c-4915-9fa1-0c6de39476bc service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] No waiting events found dispatching network-vif-plugged-b79962c5-9f95-4d9b-ae67-11445b571d91 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2158.248376] env[62875]: WARNING nova.compute.manager [req-deac706f-c54d-4098-b665-da1c239abfce req-96c2b85a-9b7c-4915-9fa1-0c6de39476bc service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Received unexpected event network-vif-plugged-b79962c5-9f95-4d9b-ae67-11445b571d91 for instance with vm_state building and task_state spawning.
[ 2158.350023] env[62875]: DEBUG nova.network.neutron [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Successfully updated port: b79962c5-9f95-4d9b-ae67-11445b571d91 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2158.388976] env[62875]: DEBUG oslo_vmware.api [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180334, 'name': RemoveSnapshot_Task, 'duration_secs': 0.703627} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2158.389219] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}}
[ 2158.389467] env[62875]: INFO nova.compute.manager [None req-67ff0808-4942-4816-a682-7edf8bfcb909 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Took 23.01 seconds to snapshot the instance on the hypervisor.
[ 2158.639468] env[62875]: DEBUG nova.scheduler.client.report [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2158.852677] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-d40aaba6-020d-45b9-83e7-8d7fe382b20f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2158.852866] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-d40aaba6-020d-45b9-83e7-8d7fe382b20f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2158.853052] env[62875]: DEBUG nova.network.neutron [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2158.912017] env[62875]: DEBUG nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2158.935319] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2158.937395] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2158.937621] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2158.937777] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2158.937962] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2158.938129] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2158.938281] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2158.938487] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2158.938646] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2158.938881] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2158.939185] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2158.939487] env[62875]: DEBUG nova.virt.hardware [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2158.940882] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50aec47b-4af2-4d94-a052-4de404b78416 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.949267] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2357a4f-04e8-4bd9-b524-9ea7fbf70feb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.963360] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2158.971329] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Creating folder: Project (2411158f85324ab1824a5fb24ed5620e). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2158.971718] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-885023bf-2c0b-4f65-9e85-ec9ec00980d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.982829] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Created folder: Project (2411158f85324ab1824a5fb24ed5620e) in parent group-v444854.
[ 2158.983139] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Creating folder: Instances. Parent ref: group-v444955. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2158.983458] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-667ebe84-3faa-4bd5-bcd2-4096bf2521bb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2158.993504] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Created folder: Instances in parent group-v444955.
[ 2158.993845] env[62875]: DEBUG oslo.service.loopingcall [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2158.994136] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2158.994428] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e62d60f5-4a1c-4691-a325-12dccb59ad6b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2159.018064] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2159.018064] env[62875]: value = "task-2180337"
[ 2159.018064] env[62875]: _type = "Task"
[ 2159.018064] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2159.028702] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180337, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2159.144372] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2159.144960] env[62875]: DEBUG nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2159.147591] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.260s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2159.149049] env[62875]: INFO nova.compute.claims [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2159.529945] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180337, 'name': CreateVM_Task, 'duration_secs': 0.270113} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2159.530079] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 2159.530549] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2159.530716] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2159.531083] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2159.531333] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c02abcb4-57a9-4675-9be6-fe36fcd094e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2159.535685] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){
[ 2159.535685] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b5d25c-d472-20d9-8a69-965ab404618d"
[ 2159.535685] env[62875]: _type = "Task"
[ 2159.535685] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2159.545169] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b5d25c-d472-20d9-8a69-965ab404618d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2159.557617] env[62875]: DEBUG nova.network.neutron [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2159.655658] env[62875]: DEBUG nova.compute.utils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2159.657032] env[62875]: DEBUG nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2159.657208] env[62875]: DEBUG nova.network.neutron [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2159.698983] env[62875]: DEBUG nova.policy [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b7a1b271e194665a976226647254e54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c27ce2b0bc554605a2ea3606d1e182ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 2159.701189] env[62875]: DEBUG nova.network.neutron [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Updating instance_info_cache with network_info: [{"id": "b79962c5-9f95-4d9b-ae67-11445b571d91", "address": "fa:16:3e:81:1d:7b", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb79962c5-9f", "ovs_interfaceid": "b79962c5-9f95-4d9b-ae67-11445b571d91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2159.956795] env[62875]: DEBUG nova.network.neutron [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Successfully created port: 846fd804-bdbf-498c-a3f7-9741200ee2d4 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2160.045982] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b5d25c-d472-20d9-8a69-965ab404618d, 'name': SearchDatastore_Task, 'duration_secs': 0.009193} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2160.046283] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2160.046515] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2160.046750] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2160.046915] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2160.047156] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2160.047440] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad01f616-f124-4e81-aa72-2fe20e5f30fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2160.055954] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2160.056151] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 2160.056838] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04bdfc2d-246f-45cf-a7b2-13db1413fe48 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2160.062282] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){
[ 2160.062282] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d4d72a-78b9-27b4-0854-1bc2425629e3"
[ 2160.062282] env[62875]: _type = "Task"
[ 2160.062282] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2160.069746] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d4d72a-78b9-27b4-0854-1bc2425629e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2160.160466] env[62875]: DEBUG nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2160.204331] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-d40aaba6-020d-45b9-83e7-8d7fe382b20f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2160.204643] env[62875]: DEBUG nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Instance network_info: |[{"id": "b79962c5-9f95-4d9b-ae67-11445b571d91", "address": "fa:16:3e:81:1d:7b", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb79962c5-9f", "ovs_interfaceid": "b79962c5-9f95-4d9b-ae67-11445b571d91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2160.205305] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:1d:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b79962c5-9f95-4d9b-ae67-11445b571d91', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2160.213311] env[62875]: DEBUG oslo.service.loopingcall [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2160.215876] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2160.216300] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c66fa77-042c-4c54-923c-403504f95792 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2160.238065] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2160.238065] env[62875]: value = "task-2180338"
[ 2160.238065] env[62875]: _type = "Task"
[ 2160.238065] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2160.246869] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180338, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2160.272287] env[62875]: DEBUG nova.compute.manager [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Received event network-changed-b79962c5-9f95-4d9b-ae67-11445b571d91 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2160.272287] env[62875]: DEBUG nova.compute.manager [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Refreshing instance network info cache due to event network-changed-b79962c5-9f95-4d9b-ae67-11445b571d91. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2160.272287] env[62875]: DEBUG oslo_concurrency.lockutils [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] Acquiring lock "refresh_cache-d40aaba6-020d-45b9-83e7-8d7fe382b20f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2160.272440] env[62875]: DEBUG oslo_concurrency.lockutils [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] Acquired lock "refresh_cache-d40aaba6-020d-45b9-83e7-8d7fe382b20f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2160.272552] env[62875]: DEBUG nova.network.neutron [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Refreshing network info cache for port b79962c5-9f95-4d9b-ae67-11445b571d91 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2160.348387] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d68e61a-cbe1-4e94-bcc5-014bf1485c6c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2160.355976] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130ad656-63d4-4758-a3ba-ce18792e2003 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2160.385964] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2542f6c-2649-4056-820d-ff67161dcbdc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2160.392819] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07cee00-f7cf-46fe-a651-8d472a0495b3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2160.405601] env[62875]: DEBUG nova.compute.provider_tree [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2160.573087] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d4d72a-78b9-27b4-0854-1bc2425629e3, 'name': SearchDatastore_Task, 'duration_secs': 0.008631} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.573929] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83e539eb-2fba-4b92-abea-72807cfca658 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.579664] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2160.579664] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a0ce48-fce0-6885-1a41-157382cc3a1c" [ 2160.579664] env[62875]: _type = "Task" [ 2160.579664] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.588438] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a0ce48-fce0-6885-1a41-157382cc3a1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.747774] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180338, 'name': CreateVM_Task, 'duration_secs': 0.399897} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.748466] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2160.748869] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2160.749074] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2160.749407] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2160.749660] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fa086f5-703a-4787-a001-37371fca358b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.754293] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2160.754293] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52343086-0d31-121a-3a2d-fd5cdb208d5f" [ 2160.754293] env[62875]: _type = "Task" [ 2160.754293] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.761887] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52343086-0d31-121a-3a2d-fd5cdb208d5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.909015] env[62875]: DEBUG nova.scheduler.client.report [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2160.969456] env[62875]: DEBUG nova.network.neutron [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Updated VIF entry in instance network info cache for port b79962c5-9f95-4d9b-ae67-11445b571d91. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2160.969833] env[62875]: DEBUG nova.network.neutron [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Updating instance_info_cache with network_info: [{"id": "b79962c5-9f95-4d9b-ae67-11445b571d91", "address": "fa:16:3e:81:1d:7b", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb79962c5-9f", "ovs_interfaceid": "b79962c5-9f95-4d9b-ae67-11445b571d91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2161.089729] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: 
{'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a0ce48-fce0-6885-1a41-157382cc3a1c, 'name': SearchDatastore_Task, 'duration_secs': 0.009342} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.089991] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.090270] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2161.090552] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-195c7650-417e-4d36-8f1e-16c0696a858f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.097429] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2161.097429] env[62875]: value = "task-2180339" [ 2161.097429] env[62875]: _type = "Task" [ 2161.097429] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.106770] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.169233] env[62875]: DEBUG nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2161.200562] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2161.200816] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2161.200990] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2161.201232] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2161.201393] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2161.201533] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2161.201736] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2161.201891] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
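(Annotation: the nova.virt.hardware entries around this point trace CPU-topology selection: unset flavor/image limits fall back to 65536, and for the 1-vCPU m1.nano flavor exactly one (sockets, cores, threads) split survives, as the following entries confirm. Below is a minimal Python sketch of that enumeration; possible_topologies, VirtCPUTopology and UNSET are illustrative stand-ins, not Nova's actual helpers -- the real logic lives in nova/virt/hardware.py.)

# Illustrative sketch of the topology enumeration traced above; names are
# stand-ins, not Nova's real helpers (see nova/virt/hardware.py).
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

UNSET = 65536  # the fallback the log reports for unset flavor/image limits

def possible_topologies(vcpus, limits=VirtCPUTopology(UNSET, UNSET, UNSET)):
    """Yield every (sockets, cores, threads) split whose product is vcpus."""
    for sockets in range(1, min(limits.sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(limits.cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= limits.threads:
                yield VirtCPUTopology(sockets, cores, threads)

# m1.nano has vcpus=1, so exactly one candidate survives -- matching the
# "Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)"
# entries in this trace.
print(list(possible_topologies(1)))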
[ 2161.202077] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2161.202249] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2161.202420] env[62875]: DEBUG nova.virt.hardware [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2161.203310] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fb4425-aa4a-438e-8b91-db513ab8aa7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.211143] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da77e50c-bf2d-4673-b2a4-66691c1b4641 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.266272] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52343086-0d31-121a-3a2d-fd5cdb208d5f, 'name': SearchDatastore_Task, 'duration_secs': 0.008993} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.266624] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.266863] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2161.267121] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.267273] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.267459] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2161.267727] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73184eb0-017b-45a2-bccd-3227d390ef95 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.276180] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2161.276372] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2161.277115] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae99c82d-787f-4882-a4f2-a09c593152d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.284688] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2161.284688] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5228f341-0aa8-1994-916b-243d357706ad" [ 2161.284688] env[62875]: _type = "Task" [ 2161.284688] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.295555] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5228f341-0aa8-1994-916b-243d357706ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.414974] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.414974] env[62875]: DEBUG nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2161.417896] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.209s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.419462] env[62875]: INFO nova.compute.claims [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2161.436869] env[62875]: DEBUG nova.compute.manager [req-54b99a11-8e72-4f2b-903d-23c4542dd72b req-f2935ea8-ea10-4a6a-871d-8b5e6a7da1b6 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Received event network-vif-plugged-846fd804-bdbf-498c-a3f7-9741200ee2d4 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2161.437167] env[62875]: DEBUG oslo_concurrency.lockutils [req-54b99a11-8e72-4f2b-903d-23c4542dd72b req-f2935ea8-ea10-4a6a-871d-8b5e6a7da1b6 service nova] Acquiring lock "2dd748c2-048d-4450-a393-995249a9deb8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.437367] env[62875]: DEBUG oslo_concurrency.lockutils [req-54b99a11-8e72-4f2b-903d-23c4542dd72b req-f2935ea8-ea10-4a6a-871d-8b5e6a7da1b6 service nova] Lock "2dd748c2-048d-4450-a393-995249a9deb8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.437541] env[62875]: DEBUG oslo_concurrency.lockutils [req-54b99a11-8e72-4f2b-903d-23c4542dd72b req-f2935ea8-ea10-4a6a-871d-8b5e6a7da1b6 service nova] Lock "2dd748c2-048d-4450-a393-995249a9deb8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.437712] env[62875]: DEBUG nova.compute.manager [req-54b99a11-8e72-4f2b-903d-23c4542dd72b req-f2935ea8-ea10-4a6a-871d-8b5e6a7da1b6 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] No waiting events found dispatching network-vif-plugged-846fd804-bdbf-498c-a3f7-9741200ee2d4 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2161.437879] env[62875]: WARNING nova.compute.manager [req-54b99a11-8e72-4f2b-903d-23c4542dd72b req-f2935ea8-ea10-4a6a-871d-8b5e6a7da1b6 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Received unexpected event network-vif-plugged-846fd804-bdbf-498c-a3f7-9741200ee2d4 for instance with vm_state building and task_state spawning. 
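(Annotation: the lockutils/compute.manager sequence just above shows Nova's external-event handshake: the service acquires the per-instance "-events" lock, tries to pop a waiter registered for network-vif-plugged, and logs the WARNING because the instance is still spawning, so the plug event arrived before anyone was waiting for it. The sketch below is a rough, hedged model of that lock-guarded pop with invented names; it is not Nova's code, only the pattern the trace exhibits.)

# Rough sketch (invented names, not Nova's code) of the lock-guarded event
# pop traced above: an external network-vif-plugged event either wakes a
# registered waiter or is reported as unexpected.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # event tag -> threading.Event

    def prepare_for_event(self, tag):
        """Called by the spawn path before it blocks waiting on a VIF plug."""
        with self._lock:
            return self._waiters.setdefault(tag, threading.Event())

    def pop_instance_event(self, tag):
        """External-event path; mirrors _pop_event in the trace above."""
        with self._lock:                # "Acquiring lock ... -events" / "released"
            waiter = self._waiters.pop(tag, None)
        if waiter is None:
            # "No waiting events found dispatching ..." -> the WARNING branch
            return False
        waiter.set()                    # wake the thread blocked on the plug
        return True

events = InstanceEvents()
# The event in this trace arrived before any waiter was registered:
assert events.pop_instance_event("network-vif-plugged-846fd804") is False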
[ 2161.472579] env[62875]: DEBUG oslo_concurrency.lockutils [req-9d92dec6-c153-4c0d-b835-0974760164e7 req-54fe175e-fab6-4433-9109-4a685eac4922 service nova] Releasing lock "refresh_cache-d40aaba6-020d-45b9-83e7-8d7fe382b20f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.545831] env[62875]: DEBUG nova.network.neutron [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Successfully updated port: 846fd804-bdbf-498c-a3f7-9741200ee2d4 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2161.608927] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180339, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492929} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.609084] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2161.609198] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2161.609443] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8be722e-da06-4e66-9090-b7ee16748758 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.615314] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2161.615314] env[62875]: value = "task-2180340" [ 2161.615314] env[62875]: _type = "Task" [ 2161.615314] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.622448] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.794620] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5228f341-0aa8-1994-916b-243d357706ad, 'name': SearchDatastore_Task, 'duration_secs': 0.065814} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.795389] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9709590d-50a4-4d5e-8117-0255bc0cedf7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.800276] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2161.800276] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5208ad20-a18e-e237-e8a2-14dd021b2e3f" [ 2161.800276] env[62875]: _type = "Task" [ 2161.800276] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.807959] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5208ad20-a18e-e237-e8a2-14dd021b2e3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.923949] env[62875]: DEBUG nova.compute.utils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2161.928864] env[62875]: DEBUG nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2161.928864] env[62875]: DEBUG nova.network.neutron [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2161.965163] env[62875]: DEBUG nova.policy [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b7a1b271e194665a976226647254e54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c27ce2b0bc554605a2ea3606d1e182ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2162.049784] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2162.050127] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2162.050127] env[62875]: DEBUG nova.network.neutron [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2162.126779] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062294} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.127071] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2162.127814] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa51679-6f19-4e89-91ff-9d2dda368b58 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.147276] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2162.147540] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-373d09f7-59bd-4f5b-b928-643ca8dc7482 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.166472] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2162.166472] env[62875]: value = "task-2180341" [ 2162.166472] env[62875]: _type = "Task" [ 2162.166472] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.174440] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180341, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.191987] env[62875]: DEBUG nova.network.neutron [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Successfully created port: bcd6f07a-19fb-4e85-b080-d747bcddbeb5 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2162.312049] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5208ad20-a18e-e237-e8a2-14dd021b2e3f, 'name': SearchDatastore_Task, 'duration_secs': 0.010565} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.312411] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2162.312723] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d40aaba6-020d-45b9-83e7-8d7fe382b20f/d40aaba6-020d-45b9-83e7-8d7fe382b20f.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2162.313101] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d6198ac-6619-4863-aaad-8f209ee6a20e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.320143] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2162.320143] env[62875]: value = "task-2180342" [ 2162.320143] env[62875]: _type = "Task" [ 2162.320143] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.328179] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.428236] env[62875]: DEBUG nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2162.583125] env[62875]: DEBUG nova.network.neutron [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2162.651999] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d00295-b94d-4be6-a851-1fac94ca44cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.661365] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df876251-ab95-4e3e-8209-702d51f35f8b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.697884] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1a3d55-cc65-4665-b699-384be46a05ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.703881] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180341, 'name': ReconfigVM_Task, 'duration_secs': 0.276046} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.704563] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2162.705204] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d30face3-c68b-4139-aaae-68e056d632d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.710829] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cd6d2a-41e2-433f-be1c-c542ef5efd0a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.718656] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2162.718656] env[62875]: value = "task-2180343" [ 2162.718656] env[62875]: _type = "Task" [ 2162.718656] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.730521] env[62875]: DEBUG nova.compute.provider_tree [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.735721] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180343, 'name': Rename_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.795103] env[62875]: DEBUG nova.network.neutron [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Updating instance_info_cache with network_info: [{"id": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "address": "fa:16:3e:54:d5:c6", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap846fd804-bd", "ovs_interfaceid": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.829960] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180342, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.229072] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180343, 'name': Rename_Task, 'duration_secs': 0.195112} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.229368] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2163.229603] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfa460f5-dbf6-451b-aded-5dc39add2201 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.237352] env[62875]: DEBUG nova.scheduler.client.report [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2163.240398] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2163.240398] env[62875]: value = "task-2180344" [ 2163.240398] env[62875]: _type = "Task" [ 2163.240398] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.247848] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180344, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.297605] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.297948] env[62875]: DEBUG nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Instance network_info: |[{"id": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "address": "fa:16:3e:54:d5:c6", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap846fd804-bd", "ovs_interfaceid": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2163.298382] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:d5:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '846fd804-bdbf-498c-a3f7-9741200ee2d4', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2163.305916] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Creating folder: Project (c27ce2b0bc554605a2ea3606d1e182ed). Parent ref: group-v444854. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2163.306182] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4891538-02ed-4d62-a5c1-b790e44b897f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.316615] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Created folder: Project (c27ce2b0bc554605a2ea3606d1e182ed) in parent group-v444854. [ 2163.316791] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Creating folder: Instances. Parent ref: group-v444959. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2163.317014] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20b19969-7c92-4837-8950-4a1b6b42eaa7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.326820] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Created folder: Instances in parent group-v444959. [ 2163.327052] env[62875]: DEBUG oslo.service.loopingcall [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2163.330311] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2163.330574] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523856} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.330760] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9996444-d89a-40ee-971b-718f52740cc1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.345975] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d40aaba6-020d-45b9-83e7-8d7fe382b20f/d40aaba6-020d-45b9-83e7-8d7fe382b20f.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2163.346218] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2163.346462] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9582706e-59b3-4c4b-8174-a20325ab5346 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.352551] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2163.352551] env[62875]: value = "task-2180347" [ 2163.352551] env[62875]: _type = "Task" [ 2163.352551] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.353617] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2163.353617] env[62875]: value = "task-2180348" [ 2163.353617] env[62875]: _type = "Task" [ 2163.353617] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.364142] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180347, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.366519] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180348, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.442630] env[62875]: DEBUG nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2163.466464] env[62875]: DEBUG nova.compute.manager [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Received event network-changed-846fd804-bdbf-498c-a3f7-9741200ee2d4 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2163.466694] env[62875]: DEBUG nova.compute.manager [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Refreshing instance network info cache due to event network-changed-846fd804-bdbf-498c-a3f7-9741200ee2d4. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2163.466925] env[62875]: DEBUG oslo_concurrency.lockutils [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] Acquiring lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2163.467083] env[62875]: DEBUG oslo_concurrency.lockutils [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] Acquired lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2163.467246] env[62875]: DEBUG nova.network.neutron [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Refreshing network info cache for port 846fd804-bdbf-498c-a3f7-9741200ee2d4 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2163.470235] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2163.470455] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2163.470609] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 2163.470788] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2163.470941] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2163.471155] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2163.471370] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2163.471530] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2163.471694] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2163.471854] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2163.472037] env[62875]: DEBUG nova.virt.hardware [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2163.473162] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b5350b-383c-434e-9051-790d21dbe1ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.482598] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4661ff0b-6694-4c67-af67-6d08e333a1e9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.747036] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 
tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.747501] env[62875]: DEBUG nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2163.750200] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.378s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.751787] env[62875]: INFO nova.compute.claims [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2163.760422] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180344, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.830584] env[62875]: DEBUG nova.network.neutron [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Successfully updated port: bcd6f07a-19fb-4e85-b080-d747bcddbeb5 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2163.866190] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180347, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098274} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.869271] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2163.869515] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180348, 'name': CreateVM_Task, 'duration_secs': 0.399175} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.870205] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35d374f-9624-4f99-aa6f-f2e275d46cf9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.872625] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2163.873343] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2163.873506] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2163.873813] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2163.874331] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1013809-de28-46b4-9647-442947db24ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.893539] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] d40aaba6-020d-45b9-83e7-8d7fe382b20f/d40aaba6-020d-45b9-83e7-8d7fe382b20f.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2163.894629] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e759a710-fc83-4417-9fc8-c5990289208a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.908552] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2163.908552] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527a05c1-0135-15d9-1831-d6217cf1d2c2" [ 2163.908552] env[62875]: _type = "Task" [ 2163.908552] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.914090] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2163.914090] env[62875]: value = "task-2180349" [ 2163.914090] env[62875]: _type = "Task" [ 2163.914090] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.917552] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527a05c1-0135-15d9-1831-d6217cf1d2c2, 'name': SearchDatastore_Task, 'duration_secs': 0.008429} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.920251] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.920481] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2163.920705] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2163.920853] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2163.921069] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2163.921306] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da04b6de-500b-464c-9d7a-00c071284af0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.927343] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] 
Task: {'id': task-2180349, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.928283] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2163.928456] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2163.929144] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8e36bd5-fc28-4fad-905f-9195ea8ad4de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.933585] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2163.933585] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b67d4b-c9d1-0ace-6cc9-1ffc01db91e2" [ 2163.933585] env[62875]: _type = "Task" [ 2163.933585] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.940907] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b67d4b-c9d1-0ace-6cc9-1ffc01db91e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.199999] env[62875]: DEBUG nova.network.neutron [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Updated VIF entry in instance network info cache for port 846fd804-bdbf-498c-a3f7-9741200ee2d4. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2164.200484] env[62875]: DEBUG nova.network.neutron [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Updating instance_info_cache with network_info: [{"id": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "address": "fa:16:3e:54:d5:c6", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap846fd804-bd", "ovs_interfaceid": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2164.252251] env[62875]: DEBUG oslo_vmware.api [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180344, 'name': PowerOnVM_Task, 'duration_secs': 0.6158} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.252627] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2164.252756] env[62875]: INFO nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Took 5.34 seconds to spawn the instance on the hypervisor. 
[ 2164.252951] env[62875]: DEBUG nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2164.253722] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60afdf14-8288-4664-bd75-4e777c3f0309 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.259016] env[62875]: DEBUG nova.compute.utils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2164.260621] env[62875]: DEBUG nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2164.260803] env[62875]: DEBUG nova.network.neutron [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2164.299977] env[62875]: DEBUG nova.policy [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b7a1b271e194665a976226647254e54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c27ce2b0bc554605a2ea3606d1e182ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2164.333387] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "refresh_cache-79afdeda-8a95-4ad4-ba10-0424cedf1d6f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.333506] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "refresh_cache-79afdeda-8a95-4ad4-ba10-0424cedf1d6f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.333662] env[62875]: DEBUG nova.network.neutron [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Building network info cache for instance {{(pid=62875) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 2164.427297] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180349, 'name': ReconfigVM_Task, 'duration_secs': 0.407687} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.427565] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Reconfigured VM instance instance-00000047 to attach disk [datastore2] d40aaba6-020d-45b9-83e7-8d7fe382b20f/d40aaba6-020d-45b9-83e7-8d7fe382b20f.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2164.428227] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb30202f-5dd9-41a0-a0bd-195eed10cbb1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.434373] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2164.434373] env[62875]: value = "task-2180350" [ 2164.434373] env[62875]: _type = "Task" [ 2164.434373] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.445172] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180350, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.453019] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b67d4b-c9d1-0ace-6cc9-1ffc01db91e2, 'name': SearchDatastore_Task, 'duration_secs': 0.01016} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.453019] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a8fb993-c4a9-4f6b-93ae-71b6bfbea9fb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.455181] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2164.455181] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522cccf8-2886-9467-1330-b03414ec5753" [ 2164.455181] env[62875]: _type = "Task" [ 2164.455181] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.465351] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522cccf8-2886-9467-1330-b03414ec5753, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.703698] env[62875]: DEBUG oslo_concurrency.lockutils [req-3c350c7f-ac38-4099-92a3-9dd538aacdb2 req-1dac483f-6461-4645-aed7-4b173d675ba5 service nova] Releasing lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.764700] env[62875]: DEBUG nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2164.776363] env[62875]: INFO nova.compute.manager [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Took 16.91 seconds to build instance. [ 2164.782971] env[62875]: DEBUG nova.network.neutron [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Successfully created port: 6c2b6b8c-9b11-4731-a57c-2d56d2693b1b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2164.872201] env[62875]: DEBUG nova.network.neutron [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2164.948643] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180350, 'name': Rename_Task, 'duration_secs': 0.188756} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.948998] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2164.949353] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48394d22-559a-4fdc-8c54-21f59a405365 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.963376] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2164.963376] env[62875]: value = "task-2180351" [ 2164.963376] env[62875]: _type = "Task" [ 2164.963376] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.974967] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522cccf8-2886-9467-1330-b03414ec5753, 'name': SearchDatastore_Task, 'duration_secs': 0.009853} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.975793] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.976522] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2dd748c2-048d-4450-a393-995249a9deb8/2dd748c2-048d-4450-a393-995249a9deb8.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2164.976790] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ed1b3d9-ff06-4c87-992a-35692b352937 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.983130] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180351, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.988163] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2164.988163] env[62875]: value = "task-2180352" [ 2164.988163] env[62875]: _type = "Task" [ 2164.988163] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.998157] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.999143] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3783c7d2-0400-4239-a4ff-f2f013b30a84 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.007012] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fd3811-8178-4039-a32f-80f748e57e4f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.044996] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e991d7e7-f223-4389-be31-d68b35d4b3fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.053452] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec230fd-9ccf-4758-9077-88f06ac745b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.069463] env[62875]: DEBUG nova.compute.provider_tree [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2165.090968] env[62875]: DEBUG nova.network.neutron [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Updating instance_info_cache with network_info: [{"id": "bcd6f07a-19fb-4e85-b080-d747bcddbeb5", "address": "fa:16:3e:ae:23:76", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", 
"external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcd6f07a-19", "ovs_interfaceid": "bcd6f07a-19fb-4e85-b080-d747bcddbeb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.277403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f85a8850-595f-4730-acd4-9c0f05db7da9 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.420s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.471353] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180351, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.490199] env[62875]: DEBUG nova.compute.manager [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Received event network-vif-plugged-bcd6f07a-19fb-4e85-b080-d747bcddbeb5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2165.490199] env[62875]: DEBUG oslo_concurrency.lockutils [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] Acquiring lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.490199] env[62875]: DEBUG oslo_concurrency.lockutils [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.490199] env[62875]: DEBUG oslo_concurrency.lockutils [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.490351] env[62875]: DEBUG nova.compute.manager [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] No waiting events found dispatching network-vif-plugged-bcd6f07a-19fb-4e85-b080-d747bcddbeb5 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2165.490415] env[62875]: WARNING nova.compute.manager [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Received unexpected event network-vif-plugged-bcd6f07a-19fb-4e85-b080-d747bcddbeb5 for instance with vm_state building and 
task_state spawning. [ 2165.490573] env[62875]: DEBUG nova.compute.manager [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Received event network-changed-bcd6f07a-19fb-4e85-b080-d747bcddbeb5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2165.490728] env[62875]: DEBUG nova.compute.manager [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Refreshing instance network info cache due to event network-changed-bcd6f07a-19fb-4e85-b080-d747bcddbeb5. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2165.490892] env[62875]: DEBUG oslo_concurrency.lockutils [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] Acquiring lock "refresh_cache-79afdeda-8a95-4ad4-ba10-0424cedf1d6f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2165.499988] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455574} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.500240] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2dd748c2-048d-4450-a393-995249a9deb8/2dd748c2-048d-4450-a393-995249a9deb8.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2165.500450] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2165.500677] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-771b2c8e-a8db-4f1a-ab48-e38f96f36a4f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.506726] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2165.506726] env[62875]: value = "task-2180353" [ 2165.506726] env[62875]: _type = "Task" [ 2165.506726] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.514717] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180353, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.573073] env[62875]: DEBUG nova.scheduler.client.report [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2165.593161] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "refresh_cache-79afdeda-8a95-4ad4-ba10-0424cedf1d6f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.593450] env[62875]: DEBUG nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Instance network_info: |[{"id": "bcd6f07a-19fb-4e85-b080-d747bcddbeb5", "address": "fa:16:3e:ae:23:76", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcd6f07a-19", "ovs_interfaceid": "bcd6f07a-19fb-4e85-b080-d747bcddbeb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2165.593721] env[62875]: DEBUG oslo_concurrency.lockutils [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] Acquired lock "refresh_cache-79afdeda-8a95-4ad4-ba10-0424cedf1d6f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2165.593892] env[62875]: DEBUG nova.network.neutron [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Refreshing network info cache for port bcd6f07a-19fb-4e85-b080-d747bcddbeb5 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2165.594934] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None 
req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:23:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcd6f07a-19fb-4e85-b080-d747bcddbeb5', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2165.602296] env[62875]: DEBUG oslo.service.loopingcall [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2165.603043] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2165.603270] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59b51062-606a-44a9-827c-e9c64d6db71e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.625933] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2165.625933] env[62875]: value = "task-2180354" [ 2165.625933] env[62875]: _type = "Task" [ 2165.625933] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.634505] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180354, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.684010] env[62875]: INFO nova.compute.manager [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Rebuilding instance [ 2165.728850] env[62875]: DEBUG nova.compute.manager [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2165.729745] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c55ba75-3cc4-4c5b-9adb-69deead362f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.773965] env[62875]: DEBUG nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2165.798842] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2165.799245] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2165.799502] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2165.799797] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2165.800065] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2165.800325] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2165.800750] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2165.801042] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 2165.801329] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2165.801610] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2165.801901] env[62875]: DEBUG nova.virt.hardware [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2165.803234] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63f89bc-f3ef-45e1-84b2-b74c845c6b8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.814415] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21286ebc-4019-4b9b-af3b-bc391a284fdf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.972711] env[62875]: DEBUG oslo_vmware.api [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180351, 'name': PowerOnVM_Task, 'duration_secs': 0.775862} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.972711] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2165.972943] env[62875]: INFO nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Took 14.12 seconds to spawn the instance on the hypervisor. [ 2165.973142] env[62875]: DEBUG nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2165.974344] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3503fd6e-0b7d-4da6-914d-6c48ad19f732 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.017921] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.257413} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.018268] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2166.019207] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726e86ff-d86b-42d5-acfa-b17115a93d61 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.043231] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 2dd748c2-048d-4450-a393-995249a9deb8/2dd748c2-048d-4450-a393-995249a9deb8.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2166.043790] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-419359ce-e530-4095-9587-151e0493bd72 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.063598] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2166.063598] env[62875]: value = "task-2180355" [ 2166.063598] env[62875]: _type = "Task" [ 2166.063598] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.073425] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180355, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.077638] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2166.078204] env[62875]: DEBUG nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2166.080794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 15.631s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.136020] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180354, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.328627] env[62875]: DEBUG nova.network.neutron [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Updated VIF entry in instance network info cache for port bcd6f07a-19fb-4e85-b080-d747bcddbeb5. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2166.329108] env[62875]: DEBUG nova.network.neutron [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Updating instance_info_cache with network_info: [{"id": "bcd6f07a-19fb-4e85-b080-d747bcddbeb5", "address": "fa:16:3e:ae:23:76", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcd6f07a-19", "ovs_interfaceid": "bcd6f07a-19fb-4e85-b080-d747bcddbeb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2166.388463] env[62875]: DEBUG nova.compute.manager [req-cb9aacc3-8808-40fc-8333-29fcfa3dd45a req-aa196534-169d-4385-8635-65edddf30e68 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Received event network-vif-plugged-6c2b6b8c-9b11-4731-a57c-2d56d2693b1b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2166.388553] env[62875]: DEBUG oslo_concurrency.lockutils [req-cb9aacc3-8808-40fc-8333-29fcfa3dd45a req-aa196534-169d-4385-8635-65edddf30e68 service nova] Acquiring lock "85be399c-2482-4a19-b68f-b45aa4e6846b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.388691] env[62875]: DEBUG oslo_concurrency.lockutils [req-cb9aacc3-8808-40fc-8333-29fcfa3dd45a req-aa196534-169d-4385-8635-65edddf30e68 service nova] 
Lock "85be399c-2482-4a19-b68f-b45aa4e6846b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.388859] env[62875]: DEBUG oslo_concurrency.lockutils [req-cb9aacc3-8808-40fc-8333-29fcfa3dd45a req-aa196534-169d-4385-8635-65edddf30e68 service nova] Lock "85be399c-2482-4a19-b68f-b45aa4e6846b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2166.389032] env[62875]: DEBUG nova.compute.manager [req-cb9aacc3-8808-40fc-8333-29fcfa3dd45a req-aa196534-169d-4385-8635-65edddf30e68 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] No waiting events found dispatching network-vif-plugged-6c2b6b8c-9b11-4731-a57c-2d56d2693b1b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2166.389204] env[62875]: WARNING nova.compute.manager [req-cb9aacc3-8808-40fc-8333-29fcfa3dd45a req-aa196534-169d-4385-8635-65edddf30e68 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Received unexpected event network-vif-plugged-6c2b6b8c-9b11-4731-a57c-2d56d2693b1b for instance with vm_state building and task_state spawning. [ 2166.438681] env[62875]: DEBUG nova.network.neutron [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Successfully updated port: 6c2b6b8c-9b11-4731-a57c-2d56d2693b1b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2166.493681] env[62875]: INFO nova.compute.manager [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Took 21.72 seconds to build instance. [ 2166.574820] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180355, 'name': ReconfigVM_Task, 'duration_secs': 0.279263} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.575108] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 2dd748c2-048d-4450-a393-995249a9deb8/2dd748c2-048d-4450-a393-995249a9deb8.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2166.575776] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ebeb5ee-09ea-45f3-b838-73b69b62d090 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.581797] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2166.581797] env[62875]: value = "task-2180356" [ 2166.581797] env[62875]: _type = "Task" [ 2166.581797] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.585611] env[62875]: DEBUG nova.compute.utils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2166.589465] env[62875]: INFO nova.compute.claims [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2166.593347] env[62875]: DEBUG nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2166.593509] env[62875]: DEBUG nova.network.neutron [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2166.600724] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180356, 'name': Rename_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.633223] env[62875]: DEBUG nova.policy [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e79993abf5eb47cc8449e3468d3cdd4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bef7d358bb2746efb448dbf759cac58c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2166.638144] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180354, 'name': CreateVM_Task, 'duration_secs': 0.619058} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.638301] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2166.638914] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2166.639136] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2166.639493] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2166.639703] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-316400c4-da7c-4cd8-9eaf-a57a14166c17 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.644560] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2166.644560] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cdde6e-0a82-7fe6-5020-61534a72961f" [ 2166.644560] env[62875]: _type = "Task" [ 2166.644560] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.652787] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cdde6e-0a82-7fe6-5020-61534a72961f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.743121] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2166.743470] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1adb4afb-29a5-4eca-bb60-d54e8ebd655b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.752605] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2166.752605] env[62875]: value = "task-2180357" [ 2166.752605] env[62875]: _type = "Task" [ 2166.752605] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.763644] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180357, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.832390] env[62875]: DEBUG oslo_concurrency.lockutils [req-58affd3b-0bb2-4eff-8bb2-e94d006daae4 req-5d510f04-0ee9-4e2a-a54a-c5da517b0c12 service nova] Releasing lock "refresh_cache-79afdeda-8a95-4ad4-ba10-0424cedf1d6f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2166.913646] env[62875]: DEBUG nova.network.neutron [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Successfully created port: 638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2166.940732] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "refresh_cache-85be399c-2482-4a19-b68f-b45aa4e6846b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2166.940863] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "refresh_cache-85be399c-2482-4a19-b68f-b45aa4e6846b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2166.941026] env[62875]: DEBUG nova.network.neutron [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2166.995976] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee346228-1dcb-4e8c-b261-0610a80caef3 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.234s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.092626] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180356, 'name': Rename_Task, 'duration_secs': 0.330036} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.092626] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2167.092755] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e3033c2-5f23-47a6-a890-473101355f1b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.094537] env[62875]: DEBUG nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2167.098263] env[62875]: INFO nova.compute.resource_tracker [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating resource usage from migration 9265753b-3d23-4aaf-b8fb-0f4a7f38de9d [ 2167.105944] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2167.105944] env[62875]: value = "task-2180358" [ 2167.105944] env[62875]: _type = "Task" [ 2167.105944] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.114054] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180358, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.156764] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cdde6e-0a82-7fe6-5020-61534a72961f, 'name': SearchDatastore_Task, 'duration_secs': 0.034418} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.159360] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2167.159600] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2167.159833] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.159982] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.160178] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.161503] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-925de93b-e9ad-454e-86fe-7f4d8f57c0e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.170238] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.170301] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2167.173632] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47243c75-3b97-4e0b-b74b-ee38e1196a33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.179339] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2167.179339] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252a1f5-45e2-0aec-97ed-e7d08353f44d" [ 2167.179339] env[62875]: _type = "Task" [ 2167.179339] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.188286] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252a1f5-45e2-0aec-97ed-e7d08353f44d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.265511] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180357, 'name': PowerOffVM_Task, 'duration_secs': 0.3684} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.265831] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2167.266080] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2167.266881] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5809cccd-7817-47f1-bc18-43fc7064b647 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.277133] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2167.277381] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-453d4b50-5220-4889-b82b-8672d545265a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.310185] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 
3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2167.310426] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2167.310608] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Deleting the datastore file [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2167.310871] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67f0d5ef-25ca-4c76-b4db-21aec9c48e45 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.317557] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2167.317557] env[62875]: value = "task-2180360" [ 2167.317557] env[62875]: _type = "Task" [ 2167.317557] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.328331] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.334436] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e58a90-6d89-4c71-b7a5-49e4682a3382 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.341419] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46303e1d-b454-4086-b9d2-56fab5566d2c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.371728] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ba4fc8-156d-4194-b358-297bcdf9e5fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.379793] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cf5236-a468-4474-aa45-4772fd555277 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.393397] env[62875]: DEBUG nova.compute.provider_tree [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2167.476653] env[62875]: DEBUG nova.network.neutron [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2167.595700] env[62875]: DEBUG nova.compute.manager [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Received event network-changed-6c2b6b8c-9b11-4731-a57c-2d56d2693b1b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2167.595700] env[62875]: DEBUG nova.compute.manager [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Refreshing instance network info cache due to event network-changed-6c2b6b8c-9b11-4731-a57c-2d56d2693b1b. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2167.596039] env[62875]: DEBUG oslo_concurrency.lockutils [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] Acquiring lock "refresh_cache-85be399c-2482-4a19-b68f-b45aa4e6846b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.616282] env[62875]: DEBUG oslo_vmware.api [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180358, 'name': PowerOnVM_Task, 'duration_secs': 0.478329} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.616787] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2167.617101] env[62875]: INFO nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Took 6.45 seconds to spawn the instance on the hypervisor. [ 2167.617401] env[62875]: DEBUG nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2167.618253] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12680d4a-3a3e-499a-baf5-6b9ccb96305e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.656240] env[62875]: DEBUG nova.network.neutron [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Updating instance_info_cache with network_info: [{"id": "6c2b6b8c-9b11-4731-a57c-2d56d2693b1b", "address": "fa:16:3e:17:0c:77", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2b6b8c-9b", "ovs_interfaceid": "6c2b6b8c-9b11-4731-a57c-2d56d2693b1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2167.691765] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252a1f5-45e2-0aec-97ed-e7d08353f44d, 'name': SearchDatastore_Task, 'duration_secs': 0.013993} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.692838] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee43a2e-1d85-476e-8f22-b83f2d46d434 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.698246] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2167.698246] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f6d70a-1107-a935-3522-04d1c0295ea1" [ 2167.698246] env[62875]: _type = "Task" [ 2167.698246] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.707669] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f6d70a-1107-a935-3522-04d1c0295ea1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.829338] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113109} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.829743] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2167.830219] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2167.830295] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2167.896364] env[62875]: DEBUG nova.scheduler.client.report [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
2168.107555] env[62875]: DEBUG nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2168.137992] env[62875]: INFO nova.compute.manager [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Took 19.67 seconds to build instance. [ 2168.142886] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='eee0994b720641b78deb472eac5cf97f',container_format='bare',created_at=2025-01-25T05:16:38Z,direct_url=,disk_format='vmdk',id=c3db3f4b-7101-488a-81eb-25039e53d59c,min_disk=1,min_ram=0,name='tempest-test-snap-711526859',owner='bef7d358bb2746efb448dbf759cac58c',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-01-25T05:16:52Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2168.143237] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2168.143523] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2168.143783] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2168.144019] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2168.144258] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2168.144571] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2168.145131] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2168.145266] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2168.145446] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2168.145699] env[62875]: DEBUG nova.virt.hardware [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2168.146935] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ed1386-6dd1-46ea-b431-e6d1c61e4449 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.159621] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347eed71-265e-438e-a986-a84dd3b2241c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.166377] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "refresh_cache-85be399c-2482-4a19-b68f-b45aa4e6846b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.166769] env[62875]: DEBUG nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Instance network_info: |[{"id": "6c2b6b8c-9b11-4731-a57c-2d56d2693b1b", "address": "fa:16:3e:17:0c:77", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2b6b8c-9b", "ovs_interfaceid": 
"6c2b6b8c-9b11-4731-a57c-2d56d2693b1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2168.167572] env[62875]: DEBUG oslo_concurrency.lockutils [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] Acquired lock "refresh_cache-85be399c-2482-4a19-b68f-b45aa4e6846b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2168.167711] env[62875]: DEBUG nova.network.neutron [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Refreshing network info cache for port 6c2b6b8c-9b11-4731-a57c-2d56d2693b1b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2168.169445] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:0c:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c2b6b8c-9b11-4731-a57c-2d56d2693b1b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2168.176458] env[62875]: DEBUG oslo.service.loopingcall [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2168.177641] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2168.177861] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b4a5fe7-68ef-4acc-83f0-efd2ea82bebc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.206712] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2168.206712] env[62875]: value = "task-2180361" [ 2168.206712] env[62875]: _type = "Task" [ 2168.206712] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.210462] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f6d70a-1107-a935-3522-04d1c0295ea1, 'name': SearchDatastore_Task, 'duration_secs': 0.011553} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.213267] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.213553] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 79afdeda-8a95-4ad4-ba10-0424cedf1d6f/79afdeda-8a95-4ad4-ba10-0424cedf1d6f.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2168.213790] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6c59798-8309-4121-8bd8-3ba3d2975592 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.221912] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180361, 'name': CreateVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.223080] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2168.223080] env[62875]: value = "task-2180362" [ 2168.223080] env[62875]: _type = "Task" [ 2168.223080] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.231267] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180362, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.324166] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "cb4941dc-1690-46b5-93f9-407198fc1332" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.324420] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "cb4941dc-1690-46b5-93f9-407198fc1332" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.401720] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.321s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.402100] env[62875]: INFO nova.compute.manager [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Migrating [ 2168.402274] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2168.402446] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2168.403796] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.177s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.404138] env[62875]: DEBUG nova.objects.instance [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'resources' on Instance uuid 7c081de0-1952-4ca8-8f6f-80102e20bff0 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2168.639973] env[62875]: DEBUG oslo_concurrency.lockutils [None req-72c8196f-3964-4624-b3dc-357f7ee5475e tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.183s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.728266] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180361, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.743162] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516372} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.743451] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 79afdeda-8a95-4ad4-ba10-0424cedf1d6f/79afdeda-8a95-4ad4-ba10-0424cedf1d6f.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2168.743666] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2168.743933] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2eeabb16-5184-42e5-84df-aa206d5e9807 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.751060] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2168.751060] env[62875]: value = "task-2180363" [ 2168.751060] env[62875]: _type = "Task" [ 2168.751060] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.759794] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.799962] env[62875]: DEBUG nova.network.neutron [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Successfully updated port: 638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2168.826744] env[62875]: DEBUG nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2168.868215] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2168.868477] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2168.868716] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2168.869263] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2168.869263] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2168.869263] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2168.869483] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2168.869661] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2168.869832] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 
tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2168.869996] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2168.870185] env[62875]: DEBUG nova.virt.hardware [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2168.871474] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cc77fb-c043-4bf1-968b-15d6c8011bc9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.880265] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec904c55-d217-4673-9fc5-28fa200f3cf4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.894950] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2168.900431] env[62875]: DEBUG oslo.service.loopingcall [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2168.902742] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2168.903165] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-295dd967-4be5-4703-97e3-68d7baa0bd71 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.914765] env[62875]: INFO nova.compute.rpcapi [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 2168.915276] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.924182] env[62875]: DEBUG nova.objects.instance [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'numa_topology' on Instance uuid 7c081de0-1952-4ca8-8f6f-80102e20bff0 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2168.935354] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2168.935354] env[62875]: value = "task-2180364" [ 2168.935354] env[62875]: _type = "Task" [ 2168.935354] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.946224] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180364, 'name': CreateVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.949990] env[62875]: DEBUG nova.network.neutron [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Updated VIF entry in instance network info cache for port 6c2b6b8c-9b11-4731-a57c-2d56d2693b1b. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2168.950369] env[62875]: DEBUG nova.network.neutron [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Updating instance_info_cache with network_info: [{"id": "6c2b6b8c-9b11-4731-a57c-2d56d2693b1b", "address": "fa:16:3e:17:0c:77", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2b6b8c-9b", "ovs_interfaceid": "6c2b6b8c-9b11-4731-a57c-2d56d2693b1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.224823] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180361, 'name': CreateVM_Task, 'duration_secs': 0.618458} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.225015] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2169.226059] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.226272] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.226806] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2169.227072] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-578300b7-6d33-49b6-b4dc-5c7c434dea54 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.232011] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2169.232011] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b63806-810f-bd35-ad31-3b182a00d930" [ 2169.232011] env[62875]: _type = "Task" [ 2169.232011] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.239363] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b63806-810f-bd35-ad31-3b182a00d930, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.258659] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065289} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.258895] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2169.259708] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f61b60-ea46-4033-9090-04c1d9069523 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.282484] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 79afdeda-8a95-4ad4-ba10-0424cedf1d6f/79afdeda-8a95-4ad4-ba10-0424cedf1d6f.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2169.282730] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68cde83b-108c-4124-82a6-e302f7f6c696 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.301695] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2169.301695] env[62875]: value = "task-2180365" [ 2169.301695] env[62875]: _type = "Task" [ 2169.301695] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.303045] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "refresh_cache-5224c475-8739-4137-82e7-c9d149d41d61" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.303045] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "refresh_cache-5224c475-8739-4137-82e7-c9d149d41d61" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.303045] env[62875]: DEBUG nova.network.neutron [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2169.310992] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180365, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.352207] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.427599] env[62875]: DEBUG nova.objects.base [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Object Instance<7c081de0-1952-4ca8-8f6f-80102e20bff0> lazy-loaded attributes: resources,numa_topology {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2169.433975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.434223] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.434401] env[62875]: DEBUG nova.network.neutron [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2169.446952] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180364, 'name': 
CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.453424] env[62875]: DEBUG oslo_concurrency.lockutils [req-24cba138-2b34-4527-8386-e738a396990e req-cc54754a-20ba-4372-b3db-ea9e36017a72 service nova] Releasing lock "refresh_cache-85be399c-2482-4a19-b68f-b45aa4e6846b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2169.629236] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7961ad57-f5bf-4cc5-830f-173023ea2f4c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.637014] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69d37dd-7422-45ef-8505-c025036828fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.669894] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a7ae57-02e9-4a22-b234-f2e801a72359 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.679028] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76be1a4-5ff0-4a66-aa68-f80e3dc8cbd7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.694841] env[62875]: DEBUG nova.compute.provider_tree [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2169.714481] env[62875]: DEBUG nova.compute.manager [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Received event network-vif-plugged-638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2169.714606] env[62875]: DEBUG oslo_concurrency.lockutils [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] Acquiring lock "5224c475-8739-4137-82e7-c9d149d41d61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.714819] env[62875]: DEBUG oslo_concurrency.lockutils [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] Lock "5224c475-8739-4137-82e7-c9d149d41d61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.715067] env[62875]: DEBUG oslo_concurrency.lockutils [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] Lock "5224c475-8739-4137-82e7-c9d149d41d61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.715296] env[62875]: DEBUG 
nova.compute.manager [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] No waiting events found dispatching network-vif-plugged-638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2169.715507] env[62875]: WARNING nova.compute.manager [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Received unexpected event network-vif-plugged-638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 for instance with vm_state building and task_state spawning. [ 2169.715700] env[62875]: DEBUG nova.compute.manager [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Received event network-changed-638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2169.715879] env[62875]: DEBUG nova.compute.manager [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Refreshing instance network info cache due to event network-changed-638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2169.716091] env[62875]: DEBUG oslo_concurrency.lockutils [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] Acquiring lock "refresh_cache-5224c475-8739-4137-82e7-c9d149d41d61" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.744232] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b63806-810f-bd35-ad31-3b182a00d930, 'name': SearchDatastore_Task, 'duration_secs': 0.054194} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.744545] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2169.744888] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2169.745571] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.745571] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.745571] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2169.746272] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2da76195-4e8c-4197-8651-b651b0a64865 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.758173] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2169.758373] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2169.759131] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6c3e2ec-2f13-4116-aadb-0678718b084b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.764383] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2169.764383] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528319a0-b869-8255-e95f-a2166729fe4f" [ 2169.764383] env[62875]: _type = "Task" [ 2169.764383] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.771980] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528319a0-b869-8255-e95f-a2166729fe4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.812637] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180365, 'name': ReconfigVM_Task, 'duration_secs': 0.310223} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.813292] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 79afdeda-8a95-4ad4-ba10-0424cedf1d6f/79afdeda-8a95-4ad4-ba10-0424cedf1d6f.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2169.814089] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccfcefad-d3d6-42c3-b1ee-957d8e5d249f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.820963] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2169.820963] env[62875]: value = "task-2180366" [ 2169.820963] env[62875]: _type = "Task" [ 2169.820963] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.828894] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180366, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.843316] env[62875]: DEBUG nova.network.neutron [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2169.948203] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180364, 'name': CreateVM_Task, 'duration_secs': 0.774152} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.948352] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2169.948710] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.948869] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.949237] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2169.949491] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d393bc86-7e6c-41f0-94ed-8118223ad563 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.955872] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2169.955872] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528d7241-b6cb-fe50-4953-b4c02b2b6eee" [ 2169.955872] env[62875]: _type = "Task" [ 2169.955872] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.963403] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528d7241-b6cb-fe50-4953-b4c02b2b6eee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.990355] env[62875]: DEBUG nova.network.neutron [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Updating instance_info_cache with network_info: [{"id": "638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9", "address": "fa:16:3e:1d:3f:22", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap638fe5f9-1f", "ovs_interfaceid": "638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2170.135816] env[62875]: DEBUG nova.network.neutron [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance_info_cache with network_info: [{"id": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "address": "fa:16:3e:de:04:4b", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c18fe19-36", "ovs_interfaceid": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2170.198521] env[62875]: DEBUG nova.scheduler.client.report [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2170.277193] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528319a0-b869-8255-e95f-a2166729fe4f, 'name': SearchDatastore_Task, 'duration_secs': 0.04701} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.277981] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1abe5da9-d928-4ae5-943c-8053b51d2897 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.283610] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2170.283610] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b0c76-9a9b-3901-3f82-470a29ea0a5c" [ 2170.283610] env[62875]: _type = "Task" [ 2170.283610] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.291095] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b0c76-9a9b-3901-3f82-470a29ea0a5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.329891] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180366, 'name': Rename_Task, 'duration_secs': 0.146813} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.330174] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2170.330408] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d26349d1-944c-4365-986d-3125e01276b7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.336482] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2170.336482] env[62875]: value = "task-2180367" [ 2170.336482] env[62875]: _type = "Task" [ 2170.336482] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.343963] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180367, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.469571] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528d7241-b6cb-fe50-4953-b4c02b2b6eee, 'name': SearchDatastore_Task, 'duration_secs': 0.034137} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.470052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.470423] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2170.470794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.471110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.471426] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2170.471827] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1186a493-8acc-419d-b3fe-c1a561a5bab1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.482725] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2170.482935] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2170.483895] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10c86787-cd99-4925-8ad6-c45e8421872d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.489650] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2170.489650] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52857458-78a3-229f-a668-fd4a895f2fbf" [ 2170.489650] env[62875]: _type = "Task" [ 2170.489650] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.493553] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "refresh_cache-5224c475-8739-4137-82e7-c9d149d41d61" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.493879] env[62875]: DEBUG nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Instance network_info: |[{"id": "638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9", "address": "fa:16:3e:1d:3f:22", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap638fe5f9-1f", "ovs_interfaceid": "638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2170.494205] env[62875]: DEBUG oslo_concurrency.lockutils [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] Acquired lock "refresh_cache-5224c475-8739-4137-82e7-c9d149d41d61" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.494409] env[62875]: DEBUG nova.network.neutron [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Refreshing network info cache for port 638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2170.495742] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None 
req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:3f:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2170.503791] env[62875]: DEBUG oslo.service.loopingcall [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2170.507656] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2170.512300] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a45ed73e-3d5d-48c9-9871-dffeb19e31f3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.528021] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52857458-78a3-229f-a668-fd4a895f2fbf, 'name': SearchDatastore_Task, 'duration_secs': 0.010892} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.529360] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7036475e-2998-4c6e-8ad2-8a8e4a81b801 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.533983] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2170.533983] env[62875]: value = "task-2180368" [ 2170.533983] env[62875]: _type = "Task" [ 2170.533983] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.538542] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2170.538542] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b41a12-a058-8462-8442-222c34af923f" [ 2170.538542] env[62875]: _type = "Task" [ 2170.538542] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.545976] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180368, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.551637] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b41a12-a058-8462-8442-222c34af923f, 'name': SearchDatastore_Task} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.551939] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.552224] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2170.552517] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01a88831-9c09-43ef-8bde-23d4dca7c55a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.558657] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2170.558657] env[62875]: value = "task-2180369" [ 2170.558657] env[62875]: _type = "Task" [ 2170.558657] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.568648] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180369, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.638743] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.703301] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.299s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.707132] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.355s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.710065] env[62875]: INFO nova.compute.claims [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2170.795144] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b0c76-9a9b-3901-3f82-470a29ea0a5c, 'name': SearchDatastore_Task, 'duration_secs': 0.010979} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.795459] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.795747] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 85be399c-2482-4a19-b68f-b45aa4e6846b/85be399c-2482-4a19-b68f-b45aa4e6846b.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2170.796066] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-557618d1-ccad-45d6-8cca-e9a4f07417c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.804654] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2170.804654] env[62875]: value = "task-2180370" [ 2170.804654] env[62875]: _type = "Task" [ 2170.804654] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.818921] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.847030] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180367, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.917576] env[62875]: DEBUG nova.network.neutron [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Updated VIF entry in instance network info cache for port 638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9. 
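The VIF entry being refreshed here for port 638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 (the full network_info JSON is cached just below) is the same Neutron port that produced the "Instance VIF info" entry earlier: the driver maps the port's nsx-logical-switch-id into an OpaqueNetwork reference. A sketch of that mapping with field names taken from the two log entries; the function itself is illustrative, not Nova's exact code:

def vif_to_vmware_info(vif):
    # Input: one entry of the cached network_info list below.
    # Output: the shape of the "Instance VIF info" dict logged earlier.
    return {
        'network_name': vif['network']['bridge'],               # 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }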
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2170.918387] env[62875]: DEBUG nova.network.neutron [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Updating instance_info_cache with network_info: [{"id": "638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9", "address": "fa:16:3e:1d:3f:22", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap638fe5f9-1f", "ovs_interfaceid": "638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.050322] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180368, 'name': CreateVM_Task, 'duration_secs': 0.369206} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.050534] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2171.051420] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2171.051593] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2171.052015] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2171.052320] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4c3df4e-2af8-4e61-bc2f-ab1faaaa7208 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.058189] 
env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2171.058189] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fd168a-d734-da99-8f3a-e2b27c59756d" [ 2171.058189] env[62875]: _type = "Task" [ 2171.058189] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.070057] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fd168a-d734-da99-8f3a-e2b27c59756d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.073341] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180369, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.220484] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7b053783-3451-4c5d-84ea-9f8b6f544ed5 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 44.165s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.221546] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 12.286s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.221697] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2171.221914] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.222128] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.224041] env[62875]: INFO nova.compute.manager [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Terminating instance [ 2171.318518] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180370, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.346515] env[62875]: DEBUG oslo_vmware.api [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180367, 'name': PowerOnVM_Task, 'duration_secs': 0.560451} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.346776] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2171.346981] env[62875]: INFO nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Took 7.90 seconds to spawn the instance on the hypervisor. 
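The acquired/waited/held lines around the terminate come from oslo.concurrency's lock wrapper (the "inner" frames in the lockutils.py paths above). Both forms below are real oslo.concurrency APIs; the lock names are the instance UUID from the log:

from oslo_concurrency import lockutils

# Decorator form: serializes event handling per instance. The wrapping
# "inner" function is what emits the acquired/waited/held DEBUG lines.
@lockutils.synchronized('7c081de0-1952-4ca8-8f6f-80102e20bff0-events')
def _clear_events():
    pass  # critical section: mutate the instance's event table

# Context-manager form for the coarser per-instance lock:
def do_terminate_instance():
    with lockutils.lock('7c081de0-1952-4ca8-8f6f-80102e20bff0'):
        _clear_events()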
[ 2171.347257] env[62875]: DEBUG nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2171.348061] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85af4cb3-d0fd-4736-bebd-88de3f19102c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.421682] env[62875]: DEBUG oslo_concurrency.lockutils [req-7662c716-5091-4832-94a8-46aa737854a9 req-3685fe5c-8805-4d0f-8e78-b30451307b6e service nova] Releasing lock "refresh_cache-5224c475-8739-4137-82e7-c9d149d41d61" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.575557] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.575822] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Processing image c3db3f4b-7101-488a-81eb-25039e53d59c {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2171.576067] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/c3db3f4b-7101-488a-81eb-25039e53d59c.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2171.576221] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/c3db3f4b-7101-488a-81eb-25039e53d59c.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2171.576403] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2171.576661] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180369, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649775} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.576927] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17c1d12b-629c-4174-89f1-de292ef7f912 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.578692] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2171.578899] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2171.579178] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c335f1d-b63d-4348-9a97-054fc6a3b3de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.585983] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2171.585983] env[62875]: value = "task-2180371" [ 2171.585983] env[62875]: _type = "Task" [ 2171.585983] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.594972] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180371, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.596875] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2171.597083] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Folder [datastore1] devstack-image-cache_base created. 
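The "Extending root virtual disk to 1048576" step above resizes the copied cache image up to the flavor's root disk; the ExtendVirtualDisk_Task size argument is in KiB, so root_gb=1 (the m1.micro flavor dumped later in this log) becomes 1048576 KiB. The conversion is just:

def root_disk_size_kb(root_gb):
    # VirtualDiskManager.ExtendVirtualDisk_Task takes the new size in KiB
    return root_gb * 1024 * 1024

assert root_disk_size_kb(1) == 1048576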
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2171.597847] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98684faa-7f5d-4a21-84cb-78dc96c382a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.603123] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2171.603123] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52921dde-84e7-fbf8-4b5a-39fea23ee6be" [ 2171.603123] env[62875]: _type = "Task" [ 2171.603123] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.611210] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52921dde-84e7-fbf8-4b5a-39fea23ee6be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.728245] env[62875]: DEBUG nova.compute.manager [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2171.728525] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2171.728859] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-727feb72-ee84-48d0-bad1-e72228234082 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.741382] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e59ac0c-f2b3-474e-afcd-880b62b77517 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.782652] env[62875]: WARNING nova.virt.vmwareapi.vmops [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7c081de0-1952-4ca8-8f6f-80102e20bff0 could not be found. 
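The WARNING above shows the delete path tolerating a VM that is already gone from the backend: InstanceNotFound is caught and the flow continues to "Instance destroyed" and network deallocation below. A hedged sketch of that behavior; the exception class stands in for nova.exception.InstanceNotFound, and the two callables for the SearchIndex lookup and the destroy call:

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(instance_uuid, lookup_vm_ref, destroy_vm):
    try:
        destroy_vm(lookup_vm_ref(instance_uuid))  # FindAllByUuid in the log
    except InstanceNotFound:
        # Backend VM already gone: warn and treat it as destroyed so
        # network deallocation and quota cleanup still run.
        pass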
[ 2171.782856] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2171.783125] env[62875]: INFO nova.compute.manager [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2171.783439] env[62875]: DEBUG oslo.service.loopingcall [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2171.783876] env[62875]: DEBUG nova.compute.manager [-] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2171.783997] env[62875]: DEBUG nova.network.neutron [-] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2171.823579] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180370, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.866687] env[62875]: INFO nova.compute.manager [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Took 23.00 seconds to build instance.
[ 2171.967561] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e896f094-41c5-446a-99c6-ece417427619 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.975496] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a6f931-2020-4c00-a099-2cbc310f4599 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.006360] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627adeec-5f3d-413c-aafc-1600632876a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.013753] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b0c743-4367-4328-8332-e49d31e62694 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.030156] env[62875]: DEBUG nova.compute.provider_tree [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2172.096487] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180371, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091477} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.096761] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2172.097813] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a15286b-1a11-4aa5-b241-94a4911a722d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.122437] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2172.126132] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2010349-7034-4e23-991d-65fac3154584 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.149858] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Preparing fetch location {{(pid=62875) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2172.150139] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Fetch image to [datastore1] OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27/OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27.vmdk {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2172.150405] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Downloading stream optimized image c3db3f4b-7101-488a-81eb-25039e53d59c to [datastore1] OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27/OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27.vmdk on the data store datastore1 as vApp {{(pid=62875) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2172.150593] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Downloading image file data c3db3f4b-7101-488a-81eb-25039e53d59c to the ESX as VM named 'OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27' {{(pid=62875) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2172.152673] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2172.152673] env[62875]: value = "task-2180372" [ 2172.152673] env[62875]: _type = "Task" [ 2172.152673] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.159967] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cfc228-a602-419c-bd82-40fbbdfd1434 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.168163] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.184628] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance '85f7c7dc-03c4-44ff-8502-cf61ee7c3af9' progress to 0 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2172.248098] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2172.248098] env[62875]: value = "resgroup-9" [ 2172.248098] env[62875]: _type = "ResourcePool" [ 2172.248098] env[62875]: }. 
{{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2172.248854] env[62875]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-ed990839-13e2-4c71-8760-77f6d835be1b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.270010] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lease: (returnval){ [ 2172.270010] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2172.270010] env[62875]: _type = "HttpNfcLease" [ 2172.270010] env[62875]: } obtained for vApp import into resource pool (val){ [ 2172.270010] env[62875]: value = "resgroup-9" [ 2172.270010] env[62875]: _type = "ResourcePool" [ 2172.270010] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2172.270316] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the lease: (returnval){ [ 2172.270316] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2172.270316] env[62875]: _type = "HttpNfcLease" [ 2172.270316] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2172.277242] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2172.277242] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2172.277242] env[62875]: _type = "HttpNfcLease" [ 2172.277242] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2172.317573] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180370, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.178956} completed successfully. 
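The HttpNfcLease obtained above for the vApp import starts out "initializing" and is polled until it becomes ready for the stream-optimized upload (or errors). A minimal sketch of that wait, assuming a hypothetical get_lease_state() accessor for the lease's state property:

import time

def wait_for_lease_ready(get_lease_state, poll_interval=0.5, timeout=60):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = get_lease_state()
        if state == 'ready':
            return                    # caller may now open the HTTP upload URL
        if state == 'error':
            raise RuntimeError('vApp import lease failed')
        time.sleep(poll_interval)     # 'initializing': poll again
    raise TimeoutError('lease never became ready')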
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.317826] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 85be399c-2482-4a19-b68f-b45aa4e6846b/85be399c-2482-4a19-b68f-b45aa4e6846b.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2172.318062] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2172.318307] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99b12ab8-3005-46d7-9f9b-05dea1b842d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.323998] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2172.323998] env[62875]: value = "task-2180374" [ 2172.323998] env[62875]: _type = "Task" [ 2172.323998] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.332611] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180374, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.368902] env[62875]: DEBUG oslo_concurrency.lockutils [None req-da90d521-5be5-4460-896d-64b5e708b4e1 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.514s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.534132] env[62875]: DEBUG nova.scheduler.client.report [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2172.644841] env[62875]: DEBUG nova.network.neutron [-] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.671572] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.695725] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2172.696175] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d3f7d19-c16a-465c-a0ce-9c70040d57af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.709737] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2172.709737] env[62875]: value = "task-2180375" [ 2172.709737] env[62875]: _type = "Task" [ 2172.709737] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.721566] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180375, 'name': PowerOffVM_Task} progress is 0%.
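The "Inventory has not changed" line above is a short-circuit: the report client compares the freshly computed inventory against its cached ProviderTree copy and skips the placement update when they match. A sketch using the exact inventory dict from the log; the cache and helper names are assumptions, not Nova's exact code:

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def set_inventory_for_provider(cache, provider_uuid, new_inventory):
    # Identical inventory means no PUT to the placement API
    # ("Inventory has not changed in ProviderTree for provider ...").
    if cache.get(provider_uuid) == new_inventory:
        return False
    cache[provider_uuid] = new_inventory
    return True  # caller pushes the new inventory to placement

cache = {'2d6e5fad-ed55-4f17-b68d-be9dae183a02': inventory}
assert not set_inventory_for_provider(
    cache, '2d6e5fad-ed55-4f17-b68d-be9dae183a02', dict(inventory))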
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.779428] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2172.779428] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2172.779428] env[62875]: _type = "HttpNfcLease" [ 2172.779428] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2172.834847] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.253106} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.835232] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2172.836109] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470325a8-e9b6-4393-b019-8e16b2cb9bb5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.859648] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 85be399c-2482-4a19-b68f-b45aa4e6846b/85be399c-2482-4a19-b68f-b45aa4e6846b.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2172.859977] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-192d727f-9c26-4244-a87b-01e17840c644 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.879685] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2172.879685] env[62875]: value = "task-2180376" [ 2172.879685] env[62875]: _type = "Task" [ 2172.879685] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.887881] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180376, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.038901] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2173.039630] env[62875]: DEBUG nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2173.150575] env[62875]: INFO nova.compute.manager [-] [instance: 7c081de0-1952-4ca8-8f6f-80102e20bff0] Took 1.37 seconds to deallocate network for instance. [ 2173.165442] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180372, 'name': ReconfigVM_Task, 'duration_secs': 0.852189} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.165725] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf/3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2173.166348] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66c5327e-f8c8-4a1d-8cdf-56c5e93f3a2b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.172856] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2173.172856] env[62875]: value = "task-2180377" [ 2173.172856] env[62875]: _type = "Task" [ 2173.172856] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.181181] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180377, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.219407] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180375, 'name': PowerOffVM_Task, 'duration_secs': 0.366782} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.219718] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2173.219996] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance '85f7c7dc-03c4-44ff-8502-cf61ee7c3af9' progress to 17 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2173.279705] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2173.279705] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2173.279705] env[62875]: _type = "HttpNfcLease" [ 2173.279705] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2173.390186] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.544643] env[62875]: DEBUG nova.compute.utils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2173.546476] env[62875]: DEBUG nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2173.546771] env[62875]: DEBUG nova.network.neutron [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2173.596965] env[62875]: DEBUG nova.policy [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d0e175791341aea0db00ef8a1b5680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '226340868e7446cca12688a32d13c630', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2173.688686] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180377, 'name': Rename_Task, 'duration_secs': 0.203843} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.688686] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2173.688686] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-753658c5-016c-4736-90f3-121651c036b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.695775] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2173.695775] env[62875]: value = "task-2180378" [ 2173.695775] env[62875]: _type = "Task" [ 2173.695775] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.704443] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180378, 'name': PowerOnVM_Task} progress is 0%. 
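The "Policy check for network:attach_external_network failed" entry above shows oslo.policy evaluating the rule against a member/reader token and returning False, after which Nova simply proceeds without requesting an external network. A sketch with a plausible admin-only default; the check string is an assumption (the real default lives in Nova's policy registry), while the oslo.policy calls themselves are real API:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Assumed default rule: 'role:admin' is illustrative, not necessarily
# Nova's exact check string for this policy.
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '226340868e7446cca12688a32d13c630'}
if not enforcer.enforce('network:attach_external_network', {}, creds):
    pass  # not authorized: fall back to tenant networks only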
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.727410] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2173.727864] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2173.728015] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2173.728274] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2173.728477] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2173.728629] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2173.728886] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2173.729123] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2173.729368] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 
tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2173.729573] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2173.729796] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2173.736453] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77735d6d-cbd3-4d8d-bbf6-e7575d9439bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.752966] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2173.752966] env[62875]: value = "task-2180379" [ 2173.752966] env[62875]: _type = "Task" [ 2173.752966] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.761645] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180379, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.779105] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2173.779105] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2173.779105] env[62875]: _type = "HttpNfcLease" [ 2173.779105] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2173.889951] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180376, 'name': ReconfigVM_Task, 'duration_secs': 0.634591} completed successfully. 
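The nova.virt.hardware entries above narrate the CPU topology search: with no flavor or image constraints the maxima default to 65536, every (sockets, cores, threads) triple whose product equals the vCPU count is generated, and for 1 vCPU the only candidate is 1:1:1. A simplified sketch of that enumeration; Nova's actual code additionally orders the results against the preferred topology:

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append(VirtCPUTopology(s, c, t))
    return topos

# Matches the log: 1 vCPU yields exactly [VirtCPUTopology(1, 1, 1)].
assert possible_topologies(1) == [VirtCPUTopology(1, 1, 1)]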
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.890450] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 85be399c-2482-4a19-b68f-b45aa4e6846b/85be399c-2482-4a19-b68f-b45aa4e6846b.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2173.891018] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f52d5099-7fd2-4dcd-b648-a6e7497c5d2c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.897547] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2173.897547] env[62875]: value = "task-2180380" [ 2173.897547] env[62875]: _type = "Task" [ 2173.897547] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.898303] env[62875]: DEBUG nova.network.neutron [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Successfully created port: b1da1efe-c5ed-4fe3-8cfb-ef705696f13b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2173.909152] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180380, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.050272] env[62875]: DEBUG nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2174.186407] env[62875]: DEBUG oslo_concurrency.lockutils [None req-00e8d51e-534f-4d90-a710-647a53d780dd tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "7c081de0-1952-4ca8-8f6f-80102e20bff0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.965s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2174.206954] env[62875]: DEBUG oslo_vmware.api [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180378, 'name': PowerOnVM_Task, 'duration_secs': 0.455086} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.206954] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2174.207132] env[62875]: DEBUG nova.compute.manager [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2174.208236] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ec9070-740b-4f35-a056-efdae80d62ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.262947] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180379, 'name': ReconfigVM_Task, 'duration_secs': 0.216388} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.264409] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance '85f7c7dc-03c4-44ff-8502-cf61ee7c3af9' progress to 33 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2174.279806] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2174.279806] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2174.279806] env[62875]: _type = "HttpNfcLease" [ 2174.279806] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2174.410883] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180380, 'name': Rename_Task, 'duration_secs': 0.178656} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.411222] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2174.411445] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa0796a7-2b37-4408-9fd1-90fcfd8ad286 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.417673] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2174.417673] env[62875]: value = "task-2180381" [ 2174.417673] env[62875]: _type = "Task" [ 2174.417673] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.427589] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.728401] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.728704] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.728937] env[62875]: DEBUG nova.objects.instance [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62875) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2174.770044] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2174.770345] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2174.770423] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2174.770568] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2174.770746] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2174.770967] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2174.771223] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2174.771398] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2174.771567] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2174.771732] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2174.771906] env[62875]: DEBUG nova.virt.hardware [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 
tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2174.777467] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Reconfiguring VM instance instance-00000046 to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2174.778541] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8fa0c38-2bc0-4329-8c86-d6a3346e807a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.799871] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2174.799871] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2174.799871] env[62875]: _type = "HttpNfcLease" [ 2174.799871] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2174.802035] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2174.802035] env[62875]: value = "task-2180382" [ 2174.802035] env[62875]: _type = "Task" [ 2174.802035] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.811133] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180382, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.930141] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180381, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.060713] env[62875]: DEBUG nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2175.087075] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2175.087593] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2175.089026] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2175.089026] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2175.089026] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2175.089356] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2175.089579] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2175.089834] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2175.090145] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 
tempest-ServersTestJSON-1595867321-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2175.091729] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2175.091729] env[62875]: DEBUG nova.virt.hardware [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2175.092110] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c35bd0d-f92a-464f-9d5f-10976735e5bb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.102726] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9079f82-9f11-41d4-87af-6b9542982981 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.131032] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.131453] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.131685] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.131929] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.132219] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.135944] env[62875]: INFO nova.compute.manager [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Terminating instance [ 2175.287036] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2175.287036] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2175.287036] env[62875]: _type = "HttpNfcLease" [ 2175.287036] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2175.287394] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2175.287394] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521b694d-37e5-c743-d5f9-ceb329ae82a1" [ 2175.287394] env[62875]: _type = "HttpNfcLease" [ 2175.287394] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2175.288077] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53886715-48a8-4cd9-8431-bb7eaf3f40e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.295384] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5276fd34-cc62-c054-2fb5-123112a668ee/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2175.295564] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5276fd34-cc62-c054-2fb5-123112a668ee/disk-0.vmdk. {{(pid=62875) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2175.362796] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b729dceb-e9f3-4c3a-8cae-a7e29bcaf4be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.364693] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180382, 'name': ReconfigVM_Task, 'duration_secs': 0.232074} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.364955] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Reconfigured VM instance instance-00000046 to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2175.366141] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14294d2-2ade-4277-99ba-48113bfcb3e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.389883] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9/85f7c7dc-03c4-44ff-8502-cf61ee7c3af9.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2175.391042] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ee228eb-48cd-4ef5-b9b9-e2daccfcb7ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.409259] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2175.409259] env[62875]: value = "task-2180383" [ 2175.409259] env[62875]: _type = "Task" [ 2175.409259] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.417880] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180383, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.429150] env[62875]: DEBUG oslo_vmware.api [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180381, 'name': PowerOnVM_Task, 'duration_secs': 0.695601} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.429450] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2175.429654] env[62875]: INFO nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Took 9.66 seconds to spawn the instance on the hypervisor. 
[ 2175.429830] env[62875]: DEBUG nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2175.430675] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27f1307-ebf4-425e-990f-6b8494b10e2d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.604981] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "37493633-c100-44d8-b1a1-8d462733ba41" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.605376] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.639264] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "refresh_cache-3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2175.639545] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquired lock "refresh_cache-3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2175.639701] env[62875]: DEBUG nova.network.neutron [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2175.740961] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e853ad1b-573b-4612-8948-9add359aa3f1 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.921486] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180383, 'name': ReconfigVM_Task, 'duration_secs': 0.385575} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.923268] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9/85f7c7dc-03c4-44ff-8502-cf61ee7c3af9.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2175.923618] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance '85f7c7dc-03c4-44ff-8502-cf61ee7c3af9' progress to 50 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2175.949338] env[62875]: INFO nova.compute.manager [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Took 26.76 seconds to build instance. [ 2176.108074] env[62875]: DEBUG nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2176.162698] env[62875]: DEBUG nova.network.neutron [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2176.217731] env[62875]: DEBUG nova.network.neutron [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.257036] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Completed reading data from the image iterator. {{(pid=62875) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2176.257188] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5276fd34-cc62-c054-2fb5-123112a668ee/disk-0.vmdk. 
{{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2176.258090] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d2cfdb-93c6-44d3-8075-5590be8322e4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.266072] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5276fd34-cc62-c054-2fb5-123112a668ee/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2176.266230] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5276fd34-cc62-c054-2fb5-123112a668ee/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2176.266746] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ba977cab-ee01-412e-935b-f4d52556c8a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.433805] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe9a611-07f7-4681-a281-ad85148e0195 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.454774] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7fb17a40-4e0c-4efe-b9f5-a82e90c9d94a tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "85be399c-2482-4a19-b68f-b45aa4e6846b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 28.272s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2176.456800] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c47836a-b38f-4ede-9407-27dfd022e2a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.459638] env[62875]: DEBUG oslo_vmware.rw_handles [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5276fd34-cc62-c054-2fb5-123112a668ee/disk-0.vmdk.
{{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2176.459855] env[62875]: INFO nova.virt.vmwareapi.images [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Downloaded image file data c3db3f4b-7101-488a-81eb-25039e53d59c [ 2176.460599] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbfea3b-3ee4-4eca-9eef-27cd19de8efb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.489701] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1438dcf3-d317-46c0-ae5e-e9d290d32459 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.491857] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance '85f7c7dc-03c4-44ff-8502-cf61ee7c3af9' progress to 67 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2176.521018] env[62875]: INFO nova.virt.vmwareapi.images [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] The imported VM was unregistered [ 2176.521018] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Caching image {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2176.521018] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating directory with path [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2176.521018] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05c2ea28-35e6-4b94-98e0-3a9a4fa5488c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.530251] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Created directory with path [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2176.530448] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27/OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27.vmdk to [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/c3db3f4b-7101-488a-81eb-25039e53d59c.vmdk. 
{{(pid=62875) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2176.530701] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-a3b7f9c3-322e-4756-8a3c-e2c541934e35 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.537660] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2176.537660] env[62875]: value = "task-2180385" [ 2176.537660] env[62875]: _type = "Task" [ 2176.537660] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.548278] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180385, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.631511] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2176.631769] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2176.633335] env[62875]: INFO nova.compute.claims [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2176.720495] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Releasing lock "refresh_cache-3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2176.721221] env[62875]: DEBUG nova.compute.manager [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2176.721491] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2176.722678] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefd07ce-e7d1-410c-a5b6-092f1d620284 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.732273] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2176.732591] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1781ec68-bf00-48e4-aca4-8ec9d50c9d15 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.739631] env[62875]: DEBUG oslo_vmware.api [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2176.739631] env[62875]: value = "task-2180386" [ 2176.739631] env[62875]: _type = "Task" [ 2176.739631] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.748501] env[62875]: DEBUG oslo_vmware.api [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.036466] env[62875]: DEBUG nova.network.neutron [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Port 2c18fe19-36d2-4a2f-8c64-e8268acfc359 binding to destination host cpu-1 is already ACTIVE {{(pid=62875) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2177.055374] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180385, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.249339] env[62875]: DEBUG oslo_vmware.api [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180386, 'name': PowerOffVM_Task, 'duration_secs': 0.223285} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.249729] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2177.249938] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2177.250260] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-febcf6a2-8d09-40f4-bf62-a5c12d8e7697 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.275279] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2177.275362] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2177.275492] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Deleting the datastore file [datastore2] 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2177.275774] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fe5742c-165f-4bdb-9518-c145f32428b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.281902] env[62875]: DEBUG oslo_vmware.api [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for the task: (returnval){ [ 2177.281902] env[62875]: value = "task-2180388" [ 2177.281902] env[62875]: _type = "Task" [ 2177.281902] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.290518] env[62875]: DEBUG oslo_vmware.api [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180388, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.557137] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180385, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.797613] env[62875]: DEBUG oslo_vmware.api [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Task: {'id': task-2180388, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285211} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.801428] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2177.801737] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2177.801925] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2177.802124] env[62875]: INFO nova.compute.manager [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2177.802867] env[62875]: DEBUG oslo.service.loopingcall [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2177.802867] env[62875]: DEBUG nova.compute.manager [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2177.802999] env[62875]: DEBUG nova.network.neutron [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2177.819031] env[62875]: DEBUG nova.network.neutron [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2177.876618] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158945dc-56e9-44a6-8ec8-1790e27da870 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.884286] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac90cda7-86ec-4a5f-8eec-204358745391 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.915866] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe784441-8283-4a4f-abff-1575c377908b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.923778] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40aed15f-14f7-4df9-b9db-ed2593e7d7c6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.938055] env[62875]: DEBUG nova.compute.provider_tree [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2178.067929] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.068186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.068399] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.073469] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180385, 'name': MoveVirtualDisk_Task} progress is 66%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.242822] env[62875]: DEBUG oslo_concurrency.lockutils [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "2dd748c2-048d-4450-a393-995249a9deb8" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.243264] env[62875]: DEBUG oslo_concurrency.lockutils [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.243546] env[62875]: DEBUG nova.compute.manager [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2178.244537] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f14040a-8818-4a66-8d02-5b2dbabe92b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.252195] env[62875]: DEBUG nova.compute.manager [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62875) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2178.252774] env[62875]: DEBUG nova.objects.instance [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lazy-loading 'flavor' on Instance uuid 2dd748c2-048d-4450-a393-995249a9deb8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2178.322460] env[62875]: DEBUG nova.network.neutron [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.441836] env[62875]: DEBUG nova.scheduler.client.report [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2178.557145] env[62875]: DEBUG oslo_vmware.api [None 
req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180385, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.825604] env[62875]: INFO nova.compute.manager [-] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Took 1.02 seconds to deallocate network for instance. [ 2178.948313] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.316s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.948898] env[62875]: DEBUG nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2179.058741] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180385, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.315285} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.058994] env[62875]: INFO nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27/OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27.vmdk to [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/c3db3f4b-7101-488a-81eb-25039e53d59c.vmdk. [ 2179.059213] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Cleaning up location [datastore1] OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2179.059379] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_404a637d-4153-4336-994f-833bfb524c27 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2179.059624] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-189426b4-bce1-4db1-bb7e-dadc9e2ef580 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.065656] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2179.065656] env[62875]: value = "task-2180389" [ 2179.065656] env[62875]: _type = "Task" [ 2179.065656] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.073060] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180389, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.125146] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2179.125363] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2179.125546] env[62875]: DEBUG nova.network.neutron [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2179.260743] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2179.261600] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2469bb2b-dafe-43c4-a8f4-0baa0f11dbed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.268738] env[62875]: DEBUG oslo_vmware.api [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2179.268738] env[62875]: value = "task-2180390" [ 2179.268738] env[62875]: _type = "Task" [ 2179.268738] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.278033] env[62875]: DEBUG oslo_vmware.api [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180390, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.333996] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.334306] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.334542] env[62875]: DEBUG nova.objects.instance [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lazy-loading 'resources' on Instance uuid 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.454445] env[62875]: DEBUG nova.compute.utils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2179.455989] env[62875]: DEBUG nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2179.457058] env[62875]: DEBUG nova.network.neutron [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2179.498368] env[62875]: DEBUG nova.policy [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52db0a44319f46939b47247136267ceb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5062c761ea34842a2f6179ae76f3465', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2179.575584] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033643} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.575863] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2179.576046] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/c3db3f4b-7101-488a-81eb-25039e53d59c.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.576297] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/c3db3f4b-7101-488a-81eb-25039e53d59c.vmdk to [datastore1] 5224c475-8739-4137-82e7-c9d149d41d61/5224c475-8739-4137-82e7-c9d149d41d61.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2179.576574] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ac9f32f-bc83-4e9e-9dce-d668152eaaaf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.583368] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2179.583368] env[62875]: value = "task-2180391" [ 2179.583368] env[62875]: _type = "Task" [ 2179.583368] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.592410] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.779630] env[62875]: DEBUG oslo_vmware.api [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180390, 'name': PowerOffVM_Task, 'duration_secs': 0.207413} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.782766] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2179.782992] env[62875]: DEBUG nova.compute.manager [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2179.783882] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcb4595-131d-4e50-990d-d0d80b2165dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.793165] env[62875]: DEBUG nova.network.neutron [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Successfully created port: 877f91f8-adb7-4379-85a2-2dc8b5b95e10 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2179.910605] env[62875]: DEBUG nova.network.neutron [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance_info_cache with network_info: [{"id": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "address": "fa:16:3e:de:04:4b", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c18fe19-36", "ovs_interfaceid": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.960767] env[62875]: DEBUG nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2180.075700] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ce26f2-7924-4c3c-ad8b-7f6d28ba596e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.084038] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934ff837-edc1-40b5-be3b-0914f63eabcb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.121430] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccf59ac-e0b9-4dd4-9b9a-a976fafbb74d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.124393] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180391, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.132780] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606ca440-01ef-438f-9d7c-713fe73b201c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.149294] env[62875]: DEBUG nova.compute.provider_tree [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2180.300469] env[62875]: DEBUG oslo_concurrency.lockutils [None req-225e7dc3-1202-405e-b655-325d2953e059 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.057s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.413939] env[62875]: DEBUG oslo_concurrency.lockutils [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2180.599012] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180391, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.653034] env[62875]: DEBUG nova.scheduler.client.report [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2180.692445] env[62875]: DEBUG nova.objects.instance [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lazy-loading 'flavor' on Instance uuid 2dd748c2-048d-4450-a393-995249a9deb8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2180.939884] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cdf656-55a0-4ab0-aabb-cf579e02b63b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.964147] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3059b9a1-0a0d-4657-9008-6772832847e6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.971091] env[62875]: DEBUG nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2180.977279] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance '85f7c7dc-03c4-44ff-8502-cf61ee7c3af9' progress to 83 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2180.999266] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2180.999803] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2180.999803] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2180.999930] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2181.000021] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2181.000274] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2181.000541] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2181.000710] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2181.001743] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2181.002342] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2181.002386] env[62875]: DEBUG nova.virt.hardware [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2181.003393] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479c9778-9832-45dc-ac7c-4556ea5d64d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.014136] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9700f07-8aa4-49cb-8668-fa960008fdd0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.100894] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180391, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.158062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.824s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.178071] env[62875]: INFO nova.scheduler.client.report [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Deleted allocations for instance 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf [ 2181.197705] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.197889] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquired lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2181.198470] env[62875]: DEBUG nova.network.neutron [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2181.198470] env[62875]: DEBUG nova.objects.instance [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lazy-loading 'info_cache' on Instance uuid 2dd748c2-048d-4450-a393-995249a9deb8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2181.484391] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2181.484745] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f511ffe8-8782-4e9d-b301-f5b038d5e3af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.495287] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2181.495287] env[62875]: value = "task-2180392" [ 2181.495287] env[62875]: _type = "Task" [ 2181.495287] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.506780] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180392, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.599752] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180391, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.686712] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4a55599c-7fa5-4004-8b64-2721dbc559d4 tempest-ServerShowV257Test-1786094502 tempest-ServerShowV257Test-1786094502-project-member] Lock "3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.555s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.702536] env[62875]: DEBUG nova.objects.base [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Object Instance<2dd748c2-048d-4450-a393-995249a9deb8> lazy-loaded attributes: flavor,info_cache {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2182.004950] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180392, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.099342] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180391, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.355458} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.099719] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/c3db3f4b-7101-488a-81eb-25039e53d59c.vmdk to [datastore1] 5224c475-8739-4137-82e7-c9d149d41d61/5224c475-8739-4137-82e7-c9d149d41d61.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2182.100539] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd58302d-82f3-410a-8894-424d42006958 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.122262] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 5224c475-8739-4137-82e7-c9d149d41d61/5224c475-8739-4137-82e7-c9d149d41d61.vmdk or device None with type streamOptimized {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2182.122527] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff07f043-bcac-48dd-bcca-1835601c1054 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.142950] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2182.142950] env[62875]: value = "task-2180393" [ 2182.142950] env[62875]: _type = "Task" [ 2182.142950] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.150980] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180393, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.434052] env[62875]: DEBUG nova.network.neutron [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Updating instance_info_cache with network_info: [{"id": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "address": "fa:16:3e:54:d5:c6", "network": {"id": "4da1ba0a-8f3e-4567-a332-54b25e34b578", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-683175684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c27ce2b0bc554605a2ea3606d1e182ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap846fd804-bd", "ovs_interfaceid": "846fd804-bdbf-498c-a3f7-9741200ee2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2182.505021] env[62875]: DEBUG oslo_vmware.api [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180392, 'name': PowerOnVM_Task, 'duration_secs': 0.954676} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.505348] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2182.505411] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-16ea4a63-b76f-451b-9a8c-fb6924398e9e tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance '85f7c7dc-03c4-44ff-8502-cf61ee7c3af9' progress to 100 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2182.653294] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180393, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.936794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Releasing lock "refresh_cache-2dd748c2-048d-4450-a393-995249a9deb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.156127] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180393, 'name': ReconfigVM_Task, 'duration_secs': 0.92455} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.156475] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 5224c475-8739-4137-82e7-c9d149d41d61/5224c475-8739-4137-82e7-c9d149d41d61.vmdk or device None with type streamOptimized {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2183.157068] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e96cf3b9-4bf1-422d-8dc9-d8b3b9ffb5c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.162749] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2183.162749] env[62875]: value = "task-2180394" [ 2183.162749] env[62875]: _type = "Task" [ 2183.162749] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.170838] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180394, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.673672] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180394, 'name': Rename_Task, 'duration_secs': 0.122648} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.673809] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2183.673950] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9646aca-8e0f-4f4a-b71b-2ae7bccc77cb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.681113] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2183.681113] env[62875]: value = "task-2180395" [ 2183.681113] env[62875]: _type = "Task" [ 2183.681113] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.689960] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180395, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.943498] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2183.944139] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ad21060-5855-4886-89fe-e7f82f1f828f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.951404] env[62875]: DEBUG oslo_vmware.api [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2183.951404] env[62875]: value = "task-2180396" [ 2183.951404] env[62875]: _type = "Task" [ 2183.951404] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.959726] env[62875]: DEBUG oslo_vmware.api [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180396, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.192344] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180395, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.229164] env[62875]: DEBUG nova.compute.manager [req-b4bb7afb-376a-4311-8333-6474ca0053f4 req-ccd01ae2-bc40-473c-9247-bb11bbd24b09 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Received event network-vif-plugged-877f91f8-adb7-4379-85a2-2dc8b5b95e10 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2184.229414] env[62875]: DEBUG oslo_concurrency.lockutils [req-b4bb7afb-376a-4311-8333-6474ca0053f4 req-ccd01ae2-bc40-473c-9247-bb11bbd24b09 service nova] Acquiring lock "37493633-c100-44d8-b1a1-8d462733ba41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.229638] env[62875]: DEBUG oslo_concurrency.lockutils [req-b4bb7afb-376a-4311-8333-6474ca0053f4 req-ccd01ae2-bc40-473c-9247-bb11bbd24b09 service nova] Lock "37493633-c100-44d8-b1a1-8d462733ba41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.229811] env[62875]: DEBUG oslo_concurrency.lockutils [req-b4bb7afb-376a-4311-8333-6474ca0053f4 req-ccd01ae2-bc40-473c-9247-bb11bbd24b09 service nova] Lock "37493633-c100-44d8-b1a1-8d462733ba41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.229983] env[62875]: DEBUG nova.compute.manager [req-b4bb7afb-376a-4311-8333-6474ca0053f4 req-ccd01ae2-bc40-473c-9247-bb11bbd24b09 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] No waiting events found dispatching network-vif-plugged-877f91f8-adb7-4379-85a2-2dc8b5b95e10 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2184.231706] env[62875]: WARNING nova.compute.manager [req-b4bb7afb-376a-4311-8333-6474ca0053f4 req-ccd01ae2-bc40-473c-9247-bb11bbd24b09 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Received unexpected event network-vif-plugged-877f91f8-adb7-4379-85a2-2dc8b5b95e10 for instance with vm_state building and task_state spawning. 
[ 2184.316168] env[62875]: DEBUG nova.network.neutron [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Successfully updated port: 877f91f8-adb7-4379-85a2-2dc8b5b95e10 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2184.395255] env[62875]: DEBUG nova.compute.manager [req-f9e7bb3d-f80b-45ad-b21e-542470727b9f req-95924c89-8f15-45a8-9497-719dadeb32b3 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Received event network-vif-plugged-b1da1efe-c5ed-4fe3-8cfb-ef705696f13b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2184.395872] env[62875]: DEBUG oslo_concurrency.lockutils [req-f9e7bb3d-f80b-45ad-b21e-542470727b9f req-95924c89-8f15-45a8-9497-719dadeb32b3 service nova] Acquiring lock "cb4941dc-1690-46b5-93f9-407198fc1332-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.396119] env[62875]: DEBUG oslo_concurrency.lockutils [req-f9e7bb3d-f80b-45ad-b21e-542470727b9f req-95924c89-8f15-45a8-9497-719dadeb32b3 service nova] Lock "cb4941dc-1690-46b5-93f9-407198fc1332-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.396314] env[62875]: DEBUG oslo_concurrency.lockutils [req-f9e7bb3d-f80b-45ad-b21e-542470727b9f req-95924c89-8f15-45a8-9497-719dadeb32b3 service nova] Lock "cb4941dc-1690-46b5-93f9-407198fc1332-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.396568] env[62875]: DEBUG nova.compute.manager [req-f9e7bb3d-f80b-45ad-b21e-542470727b9f req-95924c89-8f15-45a8-9497-719dadeb32b3 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] No waiting events found dispatching network-vif-plugged-b1da1efe-c5ed-4fe3-8cfb-ef705696f13b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2184.396752] env[62875]: WARNING nova.compute.manager [req-f9e7bb3d-f80b-45ad-b21e-542470727b9f req-95924c89-8f15-45a8-9497-719dadeb32b3 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Received unexpected event network-vif-plugged-b1da1efe-c5ed-4fe3-8cfb-ef705696f13b for instance with vm_state building and task_state spawning. [ 2184.467307] env[62875]: DEBUG oslo_vmware.api [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180396, 'name': PowerOnVM_Task, 'duration_secs': 0.46564} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.467657] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2184.467888] env[62875]: DEBUG nova.compute.manager [None req-bb92034f-7fae-463e-b5e9-7b6631336b15 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2184.468754] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f736931-f085-4d00-b74e-906ef4005b0c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.536897] env[62875]: DEBUG nova.network.neutron [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Successfully updated port: b1da1efe-c5ed-4fe3-8cfb-ef705696f13b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2184.694567] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180395, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.818475] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-37493633-c100-44d8-b1a1-8d462733ba41" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.818641] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-37493633-c100-44d8-b1a1-8d462733ba41" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.818791] env[62875]: DEBUG nova.network.neutron [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2185.031497] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.031773] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 
tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.031964] env[62875]: DEBUG nova.compute.manager [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Going to confirm migration 1 {{(pid=62875) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2185.039645] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-cb4941dc-1690-46b5-93f9-407198fc1332" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2185.039796] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-cb4941dc-1690-46b5-93f9-407198fc1332" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2185.039950] env[62875]: DEBUG nova.network.neutron [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2185.193272] env[62875]: DEBUG oslo_vmware.api [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180395, 'name': PowerOnVM_Task, 'duration_secs': 1.356115} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.193622] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2185.193835] env[62875]: INFO nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Took 17.09 seconds to spawn the instance on the hypervisor. 
[ 2185.193986] env[62875]: DEBUG nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2185.194757] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798f09c2-037d-4f7d-bd2f-30b7e72991f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.384170] env[62875]: DEBUG nova.network.neutron [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2185.543848] env[62875]: DEBUG nova.network.neutron [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Updating instance_info_cache with network_info: [{"id": "877f91f8-adb7-4379-85a2-2dc8b5b95e10", "address": "fa:16:3e:8d:34:08", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap877f91f8-ad", "ovs_interfaceid": "877f91f8-adb7-4379-85a2-2dc8b5b95e10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.591357] env[62875]: DEBUG nova.network.neutron [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2185.630867] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2185.631074] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2185.631253] env[62875]: DEBUG nova.network.neutron [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2185.631476] env[62875]: DEBUG nova.objects.instance [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lazy-loading 'info_cache' on Instance uuid 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2185.713934] env[62875]: INFO nova.compute.manager [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Took 35.37 seconds to build instance. 
[ 2185.844718] env[62875]: DEBUG nova.network.neutron [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Updating instance_info_cache with network_info: [{"id": "b1da1efe-c5ed-4fe3-8cfb-ef705696f13b", "address": "fa:16:3e:1f:fd:5c", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1da1efe-c5", "ovs_interfaceid": "b1da1efe-c5ed-4fe3-8cfb-ef705696f13b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2186.049371] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-37493633-c100-44d8-b1a1-8d462733ba41" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.049371] env[62875]: DEBUG nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Instance network_info: |[{"id": "877f91f8-adb7-4379-85a2-2dc8b5b95e10", "address": "fa:16:3e:8d:34:08", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap877f91f8-ad", "ovs_interfaceid": "877f91f8-adb7-4379-85a2-2dc8b5b95e10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2186.049371] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:34:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '877f91f8-adb7-4379-85a2-2dc8b5b95e10', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2186.057436] env[62875]: DEBUG oslo.service.loopingcall [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2186.058070] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2186.058320] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2809fa9-ae0c-4755-8f90-8c6c7dcc3350 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.078874] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2186.078874] env[62875]: value = "task-2180397" [ 2186.078874] env[62875]: _type = "Task" [ 2186.078874] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.087396] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180397, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.216478] env[62875]: DEBUG oslo_concurrency.lockutils [None req-081bc3f9-2676-4cca-96ab-57dcf802bd36 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "5224c475-8739-4137-82e7-c9d149d41d61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.882s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.346508] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-cb4941dc-1690-46b5-93f9-407198fc1332" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.347140] env[62875]: DEBUG nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Instance network_info: |[{"id": "b1da1efe-c5ed-4fe3-8cfb-ef705696f13b", "address": "fa:16:3e:1f:fd:5c", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1da1efe-c5", "ovs_interfaceid": "b1da1efe-c5ed-4fe3-8cfb-ef705696f13b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2186.348051] env[62875]: DEBUG nova.compute.manager [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Received event network-changed-877f91f8-adb7-4379-85a2-2dc8b5b95e10 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2186.348245] env[62875]: DEBUG nova.compute.manager [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Refreshing instance network info cache due to event network-changed-877f91f8-adb7-4379-85a2-2dc8b5b95e10. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2186.348471] env[62875]: DEBUG oslo_concurrency.lockutils [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] Acquiring lock "refresh_cache-37493633-c100-44d8-b1a1-8d462733ba41" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.348622] env[62875]: DEBUG oslo_concurrency.lockutils [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] Acquired lock "refresh_cache-37493633-c100-44d8-b1a1-8d462733ba41" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.348788] env[62875]: DEBUG nova.network.neutron [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Refreshing network info cache for port 877f91f8-adb7-4379-85a2-2dc8b5b95e10 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2186.350333] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:fd:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1da1efe-c5ed-4fe3-8cfb-ef705696f13b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2186.359765] env[62875]: DEBUG oslo.service.loopingcall [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2186.360912] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2186.361149] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47bb861f-e8a9-4e2b-b7df-4720a91e1afc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.381728] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2186.381728] env[62875]: value = "task-2180398" [ 2186.381728] env[62875]: _type = "Task" [ 2186.381728] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.422686] env[62875]: DEBUG nova.compute.manager [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Received event network-changed-b1da1efe-c5ed-4fe3-8cfb-ef705696f13b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2186.422760] env[62875]: DEBUG nova.compute.manager [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Refreshing instance network info cache due to event network-changed-b1da1efe-c5ed-4fe3-8cfb-ef705696f13b. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2186.423079] env[62875]: DEBUG oslo_concurrency.lockutils [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] Acquiring lock "refresh_cache-cb4941dc-1690-46b5-93f9-407198fc1332" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.423079] env[62875]: DEBUG oslo_concurrency.lockutils [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] Acquired lock "refresh_cache-cb4941dc-1690-46b5-93f9-407198fc1332" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.423244] env[62875]: DEBUG nova.network.neutron [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Refreshing network info cache for port b1da1efe-c5ed-4fe3-8cfb-ef705696f13b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2186.492969] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "5224c475-8739-4137-82e7-c9d149d41d61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.493259] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "5224c475-8739-4137-82e7-c9d149d41d61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.493482] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "5224c475-8739-4137-82e7-c9d149d41d61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.493780] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "5224c475-8739-4137-82e7-c9d149d41d61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.493919] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "5224c475-8739-4137-82e7-c9d149d41d61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.496199] env[62875]: INFO nova.compute.manager [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Terminating instance [ 2186.588816] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180397, 'name': CreateVM_Task, 'duration_secs': 0.356623} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.589048] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2186.589709] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.589939] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.590276] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2186.590535] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2f4317a-2875-46d9-8ca0-39628415f95b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.594901] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2186.594901] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f18f9a-405a-3182-2b69-4c4c3e027b12" [ 2186.594901] env[62875]: _type = "Task" [ 2186.594901] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.603052] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f18f9a-405a-3182-2b69-4c4c3e027b12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.893927] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180398, 'name': CreateVM_Task, 'duration_secs': 0.340125} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.898531] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2186.899524] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.969448] env[62875]: DEBUG nova.network.neutron [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance_info_cache with network_info: [{"id": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "address": "fa:16:3e:de:04:4b", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c18fe19-36", "ovs_interfaceid": "2c18fe19-36d2-4a2f-8c64-e8268acfc359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.000387] env[62875]: DEBUG nova.compute.manager [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2187.000608] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2187.001505] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e74688-5375-4920-be36-86ffc7d6f6d1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.009379] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2187.011983] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-937b0ee3-a0e8-4245-a21f-6feb4a468d14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.018882] env[62875]: DEBUG oslo_vmware.api [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2187.018882] env[62875]: value = "task-2180399" [ 2187.018882] env[62875]: _type = "Task" [ 2187.018882] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.026518] env[62875]: DEBUG oslo_vmware.api [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180399, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.085150] env[62875]: DEBUG nova.network.neutron [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Updated VIF entry in instance network info cache for port 877f91f8-adb7-4379-85a2-2dc8b5b95e10. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2187.085616] env[62875]: DEBUG nova.network.neutron [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Updating instance_info_cache with network_info: [{"id": "877f91f8-adb7-4379-85a2-2dc8b5b95e10", "address": "fa:16:3e:8d:34:08", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap877f91f8-ad", "ovs_interfaceid": "877f91f8-adb7-4379-85a2-2dc8b5b95e10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.106223] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f18f9a-405a-3182-2b69-4c4c3e027b12, 'name': SearchDatastore_Task, 'duration_secs': 0.012276} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.106418] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.106634] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2187.106867] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2187.107019] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2187.107199] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2187.107478] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2187.107778] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2187.107997] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-730b28cb-cd0e-4e2c-802a-cb7c791ee2c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.109952] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cc36d35-b668-442d-8554-9ac630998913 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.115018] env[62875]: DEBUG oslo_vmware.api [None 
req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2187.115018] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526b4e37-eaf3-1973-09ba-a82d00e73800" [ 2187.115018] env[62875]: _type = "Task" [ 2187.115018] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.118763] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2187.118940] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2187.119919] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a23e9a44-befc-44c7-82bb-1052a4b0afee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.124924] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526b4e37-eaf3-1973-09ba-a82d00e73800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.127908] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2187.127908] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ad726d-d87f-308a-3522-3e30c6ec8d1b" [ 2187.127908] env[62875]: _type = "Task" [ 2187.127908] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.138332] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ad726d-d87f-308a-3522-3e30c6ec8d1b, 'name': SearchDatastore_Task, 'duration_secs': 0.007823} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.139031] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-605f8a6a-ad11-4f15-8113-d1d6924fd845 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.143795] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2187.143795] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f9d8fa-4d96-e85c-2228-c66ae8dec8b7" [ 2187.143795] env[62875]: _type = "Task" [ 2187.143795] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.151171] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f9d8fa-4d96-e85c-2228-c66ae8dec8b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.167116] env[62875]: DEBUG nova.network.neutron [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Updated VIF entry in instance network info cache for port b1da1efe-c5ed-4fe3-8cfb-ef705696f13b. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2187.167485] env[62875]: DEBUG nova.network.neutron [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Updating instance_info_cache with network_info: [{"id": "b1da1efe-c5ed-4fe3-8cfb-ef705696f13b", "address": "fa:16:3e:1f:fd:5c", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1da1efe-c5", "ovs_interfaceid": "b1da1efe-c5ed-4fe3-8cfb-ef705696f13b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.264659] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "85be399c-2482-4a19-b68f-b45aa4e6846b" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2187.264975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "85be399c-2482-4a19-b68f-b45aa4e6846b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.265164] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "85be399c-2482-4a19-b68f-b45aa4e6846b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2187.265362] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "85be399c-2482-4a19-b68f-b45aa4e6846b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.265627] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "85be399c-2482-4a19-b68f-b45aa4e6846b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.267754] env[62875]: INFO nova.compute.manager [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Terminating instance [ 2187.472408] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.472677] env[62875]: DEBUG nova.objects.instance [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lazy-loading 'migration_context' on Instance uuid 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2187.528305] env[62875]: DEBUG oslo_vmware.api [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180399, 'name': PowerOffVM_Task, 'duration_secs': 0.239831} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.528606] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2187.528809] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2187.529077] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e85100cd-1753-4992-a4bd-45f36924fd27 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.589063] env[62875]: DEBUG oslo_concurrency.lockutils [req-41e8b0a4-6de5-4fa8-ac5f-3ea203ccd02b req-b58a717c-76c0-467c-9d4a-bb6acf946190 service nova] Releasing lock "refresh_cache-37493633-c100-44d8-b1a1-8d462733ba41" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.625317] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526b4e37-eaf3-1973-09ba-a82d00e73800, 'name': SearchDatastore_Task, 'duration_secs': 0.01912} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.625612] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.625841] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2187.626061] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2187.652871] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f9d8fa-4d96-e85c-2228-c66ae8dec8b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011593} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.653121] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.653372] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 37493633-c100-44d8-b1a1-8d462733ba41/37493633-c100-44d8-b1a1-8d462733ba41.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2187.653642] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2187.653832] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2187.654056] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dec1e83-fcbf-4f25-b869-f3db131c2e5f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.656010] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b080879-0546-4851-9d12-df172a793059 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.662382] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2187.662382] env[62875]: value = "task-2180401" [ 2187.662382] env[62875]: _type = "Task" [ 2187.662382] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.666201] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2187.666389] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2187.667466] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477b80b4-01a0-4c1b-be41-9b2c64572d7e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.672744] env[62875]: DEBUG oslo_concurrency.lockutils [req-40d897b2-e308-403e-be22-2d6290dea487 req-342bb322-e235-4697-9154-68d8abc0b144 service nova] Releasing lock "refresh_cache-cb4941dc-1690-46b5-93f9-407198fc1332" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.673125] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.674505] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2187.674704] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2187.674905] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleting the datastore file [datastore1] 5224c475-8739-4137-82e7-c9d149d41d61 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2187.675145] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d74e5ba3-362b-4785-9fd6-441ca93e5fce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.677633] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2187.677633] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525cab68-d036-ab96-c17d-e11f523e6e7f" [ 2187.677633] env[62875]: _type = "Task" [ 2187.677633] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.682191] env[62875]: DEBUG oslo_vmware.api [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2187.682191] env[62875]: value = "task-2180402" [ 2187.682191] env[62875]: _type = "Task" [ 2187.682191] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.688144] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525cab68-d036-ab96-c17d-e11f523e6e7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.692716] env[62875]: DEBUG oslo_vmware.api [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.771539] env[62875]: DEBUG nova.compute.manager [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2187.771760] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2187.772688] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43abc37a-4e44-4bb3-bd90-a3951648f2da {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.780156] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2187.780392] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08df0e07-322e-4fa9-aa40-d010d7b97ad6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.786515] env[62875]: DEBUG oslo_vmware.api [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2187.786515] env[62875]: value = "task-2180403" [ 2187.786515] env[62875]: _type = "Task" [ 2187.786515] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.795339] env[62875]: DEBUG oslo_vmware.api [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180403, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.976306] env[62875]: DEBUG nova.objects.base [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Object Instance<85f7c7dc-03c4-44ff-8502-cf61ee7c3af9> lazy-loaded attributes: info_cache,migration_context {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2187.977767] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115b8c6b-2ec2-4595-8b29-580387857ee9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.006104] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b8f20fd-bea5-4b99-b5cc-95240dcb46dc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.013561] env[62875]: DEBUG oslo_vmware.api [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2188.013561] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e27498-7b41-777f-d30c-b2c0e8589fb0" [ 2188.013561] env[62875]: _type = "Task" [ 2188.013561] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.023629] env[62875]: DEBUG oslo_vmware.api [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e27498-7b41-777f-d30c-b2c0e8589fb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.171940] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180401, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.188484] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525cab68-d036-ab96-c17d-e11f523e6e7f, 'name': SearchDatastore_Task, 'duration_secs': 0.00993} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.189530] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30e6394a-e297-4bed-8198-7c3ed6365560 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.194389] env[62875]: DEBUG oslo_vmware.api [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168219} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.194922] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2188.195137] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2188.195354] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2188.195583] env[62875]: INFO nova.compute.manager [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2188.195823] env[62875]: DEBUG oslo.service.loopingcall [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2188.196036] env[62875]: DEBUG nova.compute.manager [-] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2188.196140] env[62875]: DEBUG nova.network.neutron [-] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2188.198759] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2188.198759] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522fbd65-dc02-94db-0a6b-90b63ecdec35" [ 2188.198759] env[62875]: _type = "Task" [ 2188.198759] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.206720] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522fbd65-dc02-94db-0a6b-90b63ecdec35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.296436] env[62875]: DEBUG oslo_vmware.api [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180403, 'name': PowerOffVM_Task, 'duration_secs': 0.182365} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.296762] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2188.296869] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2188.297126] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4188ff1e-9311-44cb-9502-e1fc0935cc98 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.524444] env[62875]: DEBUG oslo_vmware.api [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e27498-7b41-777f-d30c-b2c0e8589fb0, 'name': SearchDatastore_Task, 'duration_secs': 0.025716} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.524680] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.524946] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.542468] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2188.542696] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2188.542886] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleting the datastore file [datastore1] 85be399c-2482-4a19-b68f-b45aa4e6846b {{(pid=62875) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2188.543176] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7190831-8212-4c18-93b7-738a32b30882 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.550320] env[62875]: DEBUG oslo_vmware.api [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2188.550320] env[62875]: value = "task-2180405" [ 2188.550320] env[62875]: _type = "Task" [ 2188.550320] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.558598] env[62875]: DEBUG oslo_vmware.api [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180405, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.681978] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57626} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.682289] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 37493633-c100-44d8-b1a1-8d462733ba41/37493633-c100-44d8-b1a1-8d462733ba41.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2188.682557] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2188.683991] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f0590d9-4efe-4e6b-adc2-74fd60e3ec4b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.693968] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2188.693968] env[62875]: value = "task-2180406" [ 2188.693968] env[62875]: _type = "Task" [ 2188.693968] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.701872] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180406, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.710810] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522fbd65-dc02-94db-0a6b-90b63ecdec35, 'name': SearchDatastore_Task, 'duration_secs': 0.041376} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.711077] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2188.711465] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] cb4941dc-1690-46b5-93f9-407198fc1332/cb4941dc-1690-46b5-93f9-407198fc1332.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2188.711815] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-739c3d8f-0fc1-49d9-876a-cb57cfee9445 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.718392] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2188.718392] env[62875]: value = "task-2180407" [ 2188.718392] env[62875]: _type = "Task" [ 2188.718392] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.729102] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180407, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.731685] env[62875]: DEBUG nova.compute.manager [req-c1ff85c7-4e14-4883-8cc4-f5212c96c1ec req-a77beb35-213b-4324-ae50-86c15ef793d8 service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Received event network-vif-deleted-638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2188.731895] env[62875]: INFO nova.compute.manager [req-c1ff85c7-4e14-4883-8cc4-f5212c96c1ec req-a77beb35-213b-4324-ae50-86c15ef793d8 service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Neutron deleted interface 638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9; detaching it from the instance and deleting it from the info cache [ 2188.732120] env[62875]: DEBUG nova.network.neutron [req-c1ff85c7-4e14-4883-8cc4-f5212c96c1ec req-a77beb35-213b-4324-ae50-86c15ef793d8 service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.061053] env[62875]: DEBUG oslo_vmware.api [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180405, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.173555] env[62875]: DEBUG nova.network.neutron [-] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.205569] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180406, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149604} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.208336] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2189.209466] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec591df-dc4b-43fa-a35c-d47eae1b796b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.233781] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 37493633-c100-44d8-b1a1-8d462733ba41/37493633-c100-44d8-b1a1-8d462733ba41.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2189.238582] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-081761fb-89d5-4df9-92ba-0b301e2c7f4c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.254973] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b726221e-0b6c-4082-a675-b9a19df662c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.258016] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c9100ca8-a24a-47b8-88c0-82f85a397a77 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.267496] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180407, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.272053] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2189.272053] env[62875]: value = "task-2180408" [ 2189.272053] env[62875]: _type = "Task" [ 2189.272053] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.273202] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f35db9-8bb0-4948-9edf-0284965f1315 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.279490] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3ed377-4fb6-404c-ad86-ead4218e8db1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.344327] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817a280b-c5c4-4b7b-b121-80061b97123d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.347487] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180408, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.347881] env[62875]: DEBUG nova.compute.manager [req-c1ff85c7-4e14-4883-8cc4-f5212c96c1ec req-a77beb35-213b-4324-ae50-86c15ef793d8 service nova] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Detach interface failed, port_id=638fe5f9-1f8d-4d6d-a142-73c1f8e5eec9, reason: Instance 5224c475-8739-4137-82e7-c9d149d41d61 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2189.354247] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df30603a-921d-483e-ae1d-af4e57a12581 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.369792] env[62875]: DEBUG nova.compute.provider_tree [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.562417] env[62875]: DEBUG oslo_vmware.api [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.627538} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.562697] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2189.562889] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2189.563083] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2189.563261] env[62875]: INFO nova.compute.manager [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Took 1.79 seconds to destroy the instance on the hypervisor. [ 2189.563501] env[62875]: DEBUG oslo.service.loopingcall [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2189.563715] env[62875]: DEBUG nova.compute.manager [-] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2189.563782] env[62875]: DEBUG nova.network.neutron [-] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2189.676995] env[62875]: INFO nova.compute.manager [-] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Took 1.48 seconds to deallocate network for instance. [ 2189.734129] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.798959} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.734402] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] cb4941dc-1690-46b5-93f9-407198fc1332/cb4941dc-1690-46b5-93f9-407198fc1332.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2189.734624] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2189.734857] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1dcda5a-f9bd-4a00-9b6c-dc95fac0bd33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.742248] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2189.742248] env[62875]: value = "task-2180409" [ 2189.742248] env[62875]: _type = "Task" [ 2189.742248] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.751433] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180409, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.803328] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180408, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.873585] env[62875]: DEBUG nova.scheduler.client.report [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2190.183724] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.253495] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180409, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070389} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.253750] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2190.254543] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fbeaea-627e-4ebf-b106-93cd4818daca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.283626] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] cb4941dc-1690-46b5-93f9-407198fc1332/cb4941dc-1690-46b5-93f9-407198fc1332.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2190.283957] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b79dfc5-76f6-4d3f-8b94-3d70a190afd0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.311214] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180408, 'name': ReconfigVM_Task, 'duration_secs': 0.635211} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.312608] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 37493633-c100-44d8-b1a1-8d462733ba41/37493633-c100-44d8-b1a1-8d462733ba41.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2190.313306] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2190.313306] env[62875]: value = "task-2180410" [ 2190.313306] env[62875]: _type = "Task" [ 2190.313306] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.313554] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bcc7ec4-5942-4934-a95e-f8ba79aef73c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.323014] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180410, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.324195] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2190.324195] env[62875]: value = "task-2180411" [ 2190.324195] env[62875]: _type = "Task" [ 2190.324195] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.331660] env[62875]: DEBUG nova.network.neutron [-] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2190.332917] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180411, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.760691] env[62875]: DEBUG nova.compute.manager [req-dfc7b9db-adff-47fb-9f28-63275d62b4d6 req-71feb219-8244-44f6-9ba3-2b23ae5e4a7f service nova] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Received event network-vif-deleted-6c2b6b8c-9b11-4731-a57c-2d56d2693b1b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2190.825138] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180410, 'name': ReconfigVM_Task, 'duration_secs': 0.316222} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.825409] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Reconfigured VM instance instance-0000004d to attach disk [datastore2] cb4941dc-1690-46b5-93f9-407198fc1332/cb4941dc-1690-46b5-93f9-407198fc1332.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2190.826070] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-845241e5-6c5b-4518-acf3-a0b2df55748b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.831670] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2190.831670] env[62875]: value = "task-2180412" [ 2190.831670] env[62875]: _type = "Task" [ 2190.831670] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.837563] env[62875]: INFO nova.compute.manager [-] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Took 1.27 seconds to deallocate network for instance. [ 2190.837818] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180411, 'name': Rename_Task, 'duration_secs': 0.14583} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.839681] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2190.844984] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3b3d757-4a80-423c-b046-62a740d5c195 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.846348] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180412, 'name': Rename_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.851055] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2190.851055] env[62875]: value = "task-2180413" [ 2190.851055] env[62875]: _type = "Task" [ 2190.851055] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.859840] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180413, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.885032] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.359s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.887839] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.704s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.888102] env[62875]: DEBUG nova.objects.instance [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lazy-loading 'resources' on Instance uuid 5224c475-8739-4137-82e7-c9d149d41d61 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2191.342799] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180412, 'name': Rename_Task, 'duration_secs': 0.275508} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.343148] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2191.343408] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0acea854-1391-48d1-a460-97b1bab1265d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.348093] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.350041] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2191.350041] env[62875]: value = "task-2180414" [ 2191.350041] env[62875]: _type = "Task" [ 2191.350041] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.362072] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180414, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.365314] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180413, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.456451] env[62875]: INFO nova.scheduler.client.report [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted allocation for migration 9265753b-3d23-4aaf-b8fb-0f4a7f38de9d [ 2191.593862] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5ff73f-3536-460c-8419-257093c32cd2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.601787] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff8726b-5721-4755-9407-15fa0de8301f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.633317] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da35c86-817c-4794-8eba-b7b0029ad18e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.640736] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4c5c94-de48-44ee-928d-923bb1279857 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.655232] env[62875]: DEBUG nova.compute.provider_tree [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2191.862023] env[62875]: DEBUG oslo_vmware.api [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180414, 'name': PowerOnVM_Task, 'duration_secs': 0.49545} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.862023] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2191.862023] env[62875]: INFO nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Took 16.80 seconds to spawn the instance on the hypervisor. [ 2191.862023] env[62875]: DEBUG nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2191.862754] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38043c7-d35f-47a0-a42e-e1de3f451782 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.867606] env[62875]: DEBUG oslo_vmware.api [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180413, 'name': PowerOnVM_Task, 'duration_secs': 0.608741} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.868268] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2191.868693] env[62875]: INFO nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Took 10.90 seconds to spawn the instance on the hypervisor. 
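Editorial aside: nearly every DEBUG triplet in this capture ("Invoking X_Task", "Waiting for the task", "Task ... progress is N% ... completed successfully") is one round of the same oslo.vmware pattern: start an asynchronous vSphere task, then poll it to a terminal state. A minimal sketch of that pattern under assumptions — the host, credentials, and power_on_vm wrapper below are placeholders for illustration, not nova's driver code:

    # Sketch of the invoke-and-wait pattern behind the task records above.
    # Connection details are placeholders; nova builds its session from
    # CONF.vmware.* settings instead.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)  # roughly the poll cadence seen in this log

    def power_on_vm(vm_ref):
        # vm_ref is a VirtualMachine managed-object reference obtained
        # elsewhere. This emits the "Invoking VirtualMachine.PowerOnVM_Task
        # with opID=..." record...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ...and this blocks while polling ("progress is N%") until the task
        # reaches SUCCESS, raising on error or cancellation.
        return session.wait_for_task(task)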
[ 2191.868976] env[62875]: DEBUG nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2191.869927] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894d42e8-7926-49f0-8ee7-59c83c6413b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.962312] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b62c104c-3e1c-48a6-87c3-6f34d3fb2a21 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 6.930s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.158300] env[62875]: DEBUG nova.scheduler.client.report [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2192.391768] env[62875]: INFO nova.compute.manager [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Took 23.06 seconds to build instance. [ 2192.398454] env[62875]: INFO nova.compute.manager [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Took 15.78 seconds to build instance.
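Editorial aside: the inventory payload repeated in the "Inventory has not changed" records maps to schedulable capacity the way placement computes it, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. For this provider that works out to (48 - 0) * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MEMORY_MB, and (400 - 0) * 1.0 = 400 DISK_GB, with per-instance ceilings of 16 vCPUs, 65530 MB of RAM, and 174 GB of disk.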
[ 2192.663606] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.665970] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.318s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.666494] env[62875]: DEBUG nova.objects.instance [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lazy-loading 'resources' on Instance uuid 85be399c-2482-4a19-b68f-b45aa4e6846b {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2192.686457] env[62875]: INFO nova.scheduler.client.report [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted allocations for instance 5224c475-8739-4137-82e7-c9d149d41d61 [ 2192.753602] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "37493633-c100-44d8-b1a1-8d462733ba41" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2192.896945] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0406212f-af04-46b6-ace8-944bb513d082 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "cb4941dc-1690-46b5-93f9-407198fc1332" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.572s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.899540] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3385c3d4-6fda-433c-8173-f5fcd4bde89b tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.294s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.899795] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.146s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.899983] env[62875]: DEBUG nova.compute.manager [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2192.900939] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c568dd6c-3ee7-49cd-83b9-76d8868e84c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.908031] env[62875]: DEBUG nova.compute.manager [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62875) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2192.908446] env[62875]: DEBUG nova.objects.instance [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'flavor' on Instance uuid 37493633-c100-44d8-b1a1-8d462733ba41 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2193.008751] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.009078] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.009318] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.009532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.009712] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.011773] env[62875]: INFO nova.compute.manager [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Terminating instance [ 2193.031080] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "cb4941dc-1690-46b5-93f9-407198fc1332" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.031080] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "cb4941dc-1690-46b5-93f9-407198fc1332" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.031227] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "cb4941dc-1690-46b5-93f9-407198fc1332-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.031320] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "cb4941dc-1690-46b5-93f9-407198fc1332-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.031508] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "cb4941dc-1690-46b5-93f9-407198fc1332-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.033121] env[62875]: INFO nova.compute.manager [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Terminating instance [ 2193.193555] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18bba349-facf-44ed-ac3a-659168948438 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "5224c475-8739-4137-82e7-c9d149d41d61" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.700s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.321545] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2e0449-5c5d-4180-959b-11a27b48f1f0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.328960] env[62875]: DEBUG oslo_vmware.service [-]
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be9cd16-052b-498b-83d1-a5f772fd74ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.358421] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72882783-6e3e-4f44-8802-8372a1b60ebe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.365447] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5fc383-948b-4595-910a-59265b50223e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.378331] env[62875]: DEBUG nova.compute.provider_tree [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2193.452098] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "2a16938e-eeaa-430d-961b-4b060187ba99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.452338] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.515557] env[62875]: DEBUG nova.compute.manager [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2193.515805] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2193.516754] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b8f1e2-8c5f-424d-9614-c5cd5f5dc785 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.524959] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2193.525229] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea7aded2-8e4d-49a1-8b68-dba62a7fba3a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.531017] env[62875]: DEBUG oslo_vmware.api [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2193.531017] env[62875]: value = "task-2180415" [ 2193.531017] env[62875]: _type = "Task" [ 2193.531017] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.535881] env[62875]: DEBUG nova.compute.manager [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2193.536027] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2193.540228] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a273bae-7451-4efe-bc4e-fe70a923025f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.542300] env[62875]: DEBUG oslo_vmware.api [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180415, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.547185] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2193.547454] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67b30d48-92f8-459d-9e0f-6c0d12f44571 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.552898] env[62875]: DEBUG oslo_vmware.api [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2193.552898] env[62875]: value = "task-2180416" [ 2193.552898] env[62875]: _type = "Task" [ 2193.552898] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.562316] env[62875]: DEBUG oslo_vmware.api [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.884022] env[62875]: DEBUG nova.scheduler.client.report [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2193.916914] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2193.916914] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03f5774d-bb9f-4e47-a533-865bdcd48b79 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.922393] env[62875]: DEBUG oslo_vmware.api [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2193.922393] env[62875]: value = "task-2180417" [ 2193.922393] env[62875]: _type = "Task" [ 2193.922393] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.931957] env[62875]: DEBUG oslo_vmware.api [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.957796] env[62875]: DEBUG nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2194.041321] env[62875]: DEBUG oslo_vmware.api [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180415, 'name': PowerOffVM_Task, 'duration_secs': 0.220271} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.041851] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2194.042212] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2194.042647] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a2c27ae-551e-46c9-840f-7c519c485e77 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.062012] env[62875]: DEBUG oslo_vmware.api [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180416, 'name': PowerOffVM_Task, 'duration_secs': 0.204611} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.064904] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2194.064904] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2194.064904] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-559b35b3-4c6e-4e75-9d9b-1cba9a0c9911 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.188087] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2194.188087] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2194.188087] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleting the datastore file [datastore2] 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2194.188087] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15f3dede-b58e-467f-91d0-d7308d6c5baf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.190085] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2194.190437] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2194.190873] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleting the datastore file [datastore2] cb4941dc-1690-46b5-93f9-407198fc1332 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2194.191208] 
env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94a7d65f-8f6a-43a3-b990-bbe25198cbdd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.197727] env[62875]: DEBUG oslo_vmware.api [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2194.197727] env[62875]: value = "task-2180421" [ 2194.197727] env[62875]: _type = "Task" [ 2194.197727] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.199430] env[62875]: DEBUG oslo_vmware.api [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2194.199430] env[62875]: value = "task-2180420" [ 2194.199430] env[62875]: _type = "Task" [ 2194.199430] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.210566] env[62875]: DEBUG oslo_vmware.api [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.213554] env[62875]: DEBUG oslo_vmware.api [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180421, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.391664] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2194.395158] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "346f4371-3029-4710-9163-08cf36196207" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.395158] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "346f4371-3029-4710-9163-08cf36196207" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.395158] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "346f4371-3029-4710-9163-08cf36196207-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.395158] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "346f4371-3029-4710-9163-08cf36196207-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.395158] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "346f4371-3029-4710-9163-08cf36196207-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2194.395905] env[62875]: INFO nova.compute.manager [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Terminating instance [ 2194.407862] env[62875]: INFO nova.scheduler.client.report [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleted allocations for instance 85be399c-2482-4a19-b68f-b45aa4e6846b [ 2194.433933] env[62875]: DEBUG oslo_vmware.api [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180417, 'name': PowerOffVM_Task, 
'duration_secs': 0.179595} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.434543] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2194.438083] env[62875]: DEBUG nova.compute.manager [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2194.438083] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedcb37e-68c7-41ae-97b9-e86b3104a94b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.473187] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.476038] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.476038] env[62875]: INFO nova.compute.claims [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2194.716803] env[62875]: DEBUG oslo_vmware.api [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169768} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.717448] env[62875]: DEBUG oslo_vmware.api [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158934} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.717889] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2194.718305] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2194.718695] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2194.721204] env[62875]: INFO nova.compute.manager [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Took 1.20 seconds to destroy the instance on the hypervisor. [ 2194.721204] env[62875]: DEBUG oslo.service.loopingcall [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2194.721204] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2194.721204] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2194.721204] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2194.721204] env[62875]: INFO nova.compute.manager [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Took 1.18 seconds to destroy the instance on the hypervisor. [ 2194.721204] env[62875]: DEBUG oslo.service.loopingcall [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2194.721204] env[62875]: DEBUG nova.compute.manager [-] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2194.721204] env[62875]: DEBUG nova.network.neutron [-] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2194.726015] env[62875]: DEBUG nova.compute.manager [-] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2194.726015] env[62875]: DEBUG nova.network.neutron [-] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2194.862836] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.863259] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.902520] env[62875]: DEBUG nova.compute.manager [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2194.902520] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2194.902520] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e079b7d7-1aeb-45d3-8d24-34ec29d5c144 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.909932] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2194.910565] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8788c34-d8d5-4878-872e-fb117c3c5a34 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.917592] env[62875]: DEBUG oslo_vmware.api [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2194.917592] env[62875]: value = "task-2180422" [ 2194.917592] env[62875]: _type = "Task" [ 2194.917592] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.921102] env[62875]: DEBUG oslo_concurrency.lockutils [None req-209b3d0e-63a1-40e3-b699-1bc653c36d47 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "85be399c-2482-4a19-b68f-b45aa4e6846b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.653s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2194.927286] env[62875]: DEBUG oslo_vmware.api [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180422, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.947281] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dbef66ff-2d21-4aff-bf40-7269cc363e29 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.031175] env[62875]: DEBUG nova.compute.manager [req-8b6bfdda-238e-4e7c-b9d7-b96df204d760 req-2d8742d9-4f49-4995-8815-24968456236a service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Received event network-vif-deleted-b1da1efe-c5ed-4fe3-8cfb-ef705696f13b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2195.031175] env[62875]: INFO nova.compute.manager [req-8b6bfdda-238e-4e7c-b9d7-b96df204d760 req-2d8742d9-4f49-4995-8815-24968456236a service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Neutron deleted interface b1da1efe-c5ed-4fe3-8cfb-ef705696f13b; detaching it from the instance and deleting it from the info cache [ 2195.031175] env[62875]: DEBUG nova.network.neutron [req-8b6bfdda-238e-4e7c-b9d7-b96df204d760 req-2d8742d9-4f49-4995-8815-24968456236a service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.100849] env[62875]: DEBUG nova.compute.manager [req-57a88331-6a21-442d-9040-41105d7f25bc req-0fa04f33-f033-46dc-a2d6-a341de495067 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Received event network-vif-deleted-2c18fe19-36d2-4a2f-8c64-e8268acfc359 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2195.100849] env[62875]: INFO nova.compute.manager [req-57a88331-6a21-442d-9040-41105d7f25bc req-0fa04f33-f033-46dc-a2d6-a341de495067 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Neutron deleted interface 2c18fe19-36d2-4a2f-8c64-e8268acfc359; detaching it from the instance and deleting it from the info cache [ 2195.100849] env[62875]: DEBUG nova.network.neutron [req-57a88331-6a21-442d-9040-41105d7f25bc req-0fa04f33-f033-46dc-a2d6-a341de495067 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.132705] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.133255] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2195.136017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.136017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.136017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.136554] env[62875]: INFO nova.compute.manager [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Terminating instance [ 2195.147990] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "37493633-c100-44d8-b1a1-8d462733ba41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.148389] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.148726] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "37493633-c100-44d8-b1a1-8d462733ba41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.149066] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.149417] env[62875]: 
DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.152089] env[62875]: INFO nova.compute.manager [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Terminating instance [ 2195.368019] env[62875]: DEBUG nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2195.428919] env[62875]: DEBUG oslo_vmware.api [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180422, 'name': PowerOffVM_Task, 'duration_secs': 0.204097} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.429519] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2195.429908] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2195.430308] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d454edc-8b88-49a3-ae11-2829c09fd6c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.515025] env[62875]: DEBUG nova.network.neutron [-] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.536019] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38a3618f-e601-459d-af3b-14bf7433eb04 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.545019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2195.545019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Deleting contents of the VM from datastore datastore1 {{(pid=62875) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2195.545019] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleting the datastore file [datastore1] 346f4371-3029-4710-9163-08cf36196207 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2195.545019] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b4f764d-3dc8-4eb5-9db9-8d8ed0991252 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.549068] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24439f49-1fea-4c72-862c-60dba1cf8a5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.563858] env[62875]: DEBUG oslo_vmware.api [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2195.563858] env[62875]: value = "task-2180424" [ 2195.563858] env[62875]: _type = "Task" [ 2195.563858] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.572822] env[62875]: DEBUG oslo_vmware.api [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180424, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.582439] env[62875]: DEBUG nova.compute.manager [req-8b6bfdda-238e-4e7c-b9d7-b96df204d760 req-2d8742d9-4f49-4995-8815-24968456236a service nova] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Detach interface failed, port_id=b1da1efe-c5ed-4fe3-8cfb-ef705696f13b, reason: Instance cb4941dc-1690-46b5-93f9-407198fc1332 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2195.585395] env[62875]: DEBUG nova.network.neutron [-] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.607854] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bdc3f56-8219-4574-9185-092fd8bce612 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.615433] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f249aebd-6727-4e16-870d-bece96edeb84 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.641023] env[62875]: DEBUG nova.compute.manager [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2195.641023] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2195.651495] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0484fe-452f-4ac3-befc-e1823f28a52b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.654253] env[62875]: DEBUG nova.compute.manager [req-57a88331-6a21-442d-9040-41105d7f25bc req-0fa04f33-f033-46dc-a2d6-a341de495067 service nova] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Detach interface failed, port_id=2c18fe19-36d2-4a2f-8c64-e8268acfc359, reason: Instance 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2195.655254] env[62875]: DEBUG nova.compute.manager [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2195.655540] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2195.656382] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261f0b91-2250-4b95-86b5-04a1b0a0d907 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.663917] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2195.667083] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9f43ade-2d70-49ad-a182-96ac665691a6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.669552] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2195.670179] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-719998e2-0fd1-4c69-b7ca-5bdbef8fea80 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.675274] env[62875]: DEBUG oslo_vmware.api [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 
tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){
[ 2195.675274] env[62875]: value = "task-2180425"
[ 2195.675274] env[62875]: _type = "Task"
[ 2195.675274] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2195.684392] env[62875]: DEBUG oslo_vmware.api [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180425, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2195.709255] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40315487-aa5d-4368-abe7-1b6ef010ba4d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2195.715709] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb350b3-3126-470f-ba3f-4cc44ed49969 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2195.749031] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3b796a-7069-4d83-a717-203e04925cb7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2195.756407] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46a344c-92f9-4f64-beb0-303845cc1e3a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2195.772774] env[62875]: DEBUG nova.compute.provider_tree [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2195.858408] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2195.858408] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2195.858408] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleting the datastore file [datastore2] 37493633-c100-44d8-b1a1-8d462733ba41 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2195.858408] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8f1d20b-be02-4c9b-b0d3-8367f07384b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2195.864542] env[62875]: DEBUG oslo_vmware.api [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){
[ 2195.864542] env[62875]: value = "task-2180427"
[ 2195.864542] env[62875]: _type = "Task"
[ 2195.864542] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2195.884280] env[62875]: DEBUG oslo_vmware.api [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2195.897188] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2196.017534] env[62875]: INFO nova.compute.manager [-] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Took 1.29 seconds to deallocate network for instance.
[ 2196.080163] env[62875]: DEBUG oslo_vmware.api [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180424, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132904} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2196.080163] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2196.080163] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2196.080163] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2196.080163] env[62875]: INFO nova.compute.manager [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 346f4371-3029-4710-9163-08cf36196207] Took 1.18 seconds to destroy the instance on the hypervisor.
[ 2196.080163] env[62875]: DEBUG oslo.service.loopingcall [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2196.080163] env[62875]: DEBUG nova.compute.manager [-] [instance: 346f4371-3029-4710-9163-08cf36196207] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2196.080163] env[62875]: DEBUG nova.network.neutron [-] [instance: 346f4371-3029-4710-9163-08cf36196207] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2196.090120] env[62875]: INFO nova.compute.manager [-] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Took 1.37 seconds to deallocate network for instance.
[ 2196.187346] env[62875]: DEBUG oslo_vmware.api [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180425, 'name': PowerOffVM_Task, 'duration_secs': 0.222383} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2196.188249] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2196.188249] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2196.188249] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1444f6d7-7355-4e4a-9ae8-395d88ce13f0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2196.272098] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2196.272378] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2196.272581] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleting the datastore file [datastore1] 79afdeda-8a95-4ad4-ba10-0424cedf1d6f {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2196.272856] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7eef1482-d2f2-4a9e-9007-6fbaee8c9ec5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2196.275563] env[62875]: DEBUG nova.scheduler.client.report [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2196.283426] env[62875]: DEBUG oslo_vmware.api [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){
[ 2196.283426] env[62875]: value = "task-2180429"
[ 2196.283426] env[62875]: _type = "Task"
[ 2196.283426] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2196.291770] env[62875]: DEBUG oslo_vmware.api [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2196.375152] env[62875]: DEBUG oslo_vmware.api [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157507} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2196.375410] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2196.375591] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2196.375761] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2196.375929] env[62875]: INFO nova.compute.manager [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Took 0.72 seconds to destroy the instance on the hypervisor.
[ 2196.376197] env[62875]: DEBUG oslo.service.loopingcall [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2196.376386] env[62875]: DEBUG nova.compute.manager [-] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2196.376478] env[62875]: DEBUG nova.network.neutron [-] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2196.529546] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2196.594206] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2196.780644] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.307s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2196.781195] env[62875]: DEBUG nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2196.784011] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.887s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2196.785530] env[62875]: INFO nova.compute.claims [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2196.797105] env[62875]: DEBUG nova.network.neutron [-] [instance: 346f4371-3029-4710-9163-08cf36196207] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2196.798100] env[62875]: DEBUG oslo_vmware.api [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132726} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2196.798536] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2196.798776] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2196.799056] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2196.799333] env[62875]: INFO nova.compute.manager [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Took 1.16 seconds to destroy the instance on the hypervisor.
[ 2196.799589] env[62875]: DEBUG oslo.service.loopingcall [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2196.799823] env[62875]: DEBUG nova.compute.manager [-] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2196.799967] env[62875]: DEBUG nova.network.neutron [-] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2197.061510] env[62875]: DEBUG nova.network.neutron [-] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2197.144836] env[62875]: DEBUG nova.compute.manager [req-cda89567-2430-4e6e-89be-fc950883103c req-04143e7c-0349-4ed4-a4dd-c30cb616a8f0 service nova] [instance: 346f4371-3029-4710-9163-08cf36196207] Received event network-vif-deleted-821254fd-eb8e-4958-8bce-51b2447b3ee9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2197.145550] env[62875]: DEBUG nova.compute.manager [req-cda89567-2430-4e6e-89be-fc950883103c req-04143e7c-0349-4ed4-a4dd-c30cb616a8f0 service nova] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Received event network-vif-deleted-877f91f8-adb7-4379-85a2-2dc8b5b95e10 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2197.148237] env[62875]: DEBUG nova.compute.manager [req-cda89567-2430-4e6e-89be-fc950883103c req-04143e7c-0349-4ed4-a4dd-c30cb616a8f0 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Received event network-vif-deleted-bcd6f07a-19fb-4e85-b080-d747bcddbeb5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2197.148431] env[62875]: INFO nova.compute.manager [req-cda89567-2430-4e6e-89be-fc950883103c req-04143e7c-0349-4ed4-a4dd-c30cb616a8f0 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Neutron deleted interface bcd6f07a-19fb-4e85-b080-d747bcddbeb5; detaching it from the instance and deleting it from the info cache
[ 2197.148608] env[62875]: DEBUG nova.network.neutron [req-cda89567-2430-4e6e-89be-fc950883103c req-04143e7c-0349-4ed4-a4dd-c30cb616a8f0 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2197.289833] env[62875]: DEBUG nova.compute.utils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2197.291545] env[62875]: DEBUG nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2197.291735] env[62875]: DEBUG nova.network.neutron [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2197.301021] env[62875]: INFO nova.compute.manager [-] [instance: 346f4371-3029-4710-9163-08cf36196207] Took 1.22 seconds to deallocate network for instance.
[ 2197.342446] env[62875]: DEBUG nova.policy [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e818b6d10af42bb9c86e79ae93de507', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7523e34b88d84ec1ae28221d8d1a3591', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 2197.564148] env[62875]: INFO nova.compute.manager [-] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Took 1.19 seconds to deallocate network for instance.
[ 2197.573450] env[62875]: DEBUG nova.network.neutron [-] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2197.588665] env[62875]: DEBUG nova.network.neutron [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Successfully created port: acf0849a-ab7f-4949-951d-8268baee804c {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2197.650703] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-161b1fa5-5f43-4bd7-9dc2-7e85bb4e3c59 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2197.659963] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1ef9f0-be22-4b24-b5b7-6083343fd07f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2197.691643] env[62875]: DEBUG nova.compute.manager [req-cda89567-2430-4e6e-89be-fc950883103c req-04143e7c-0349-4ed4-a4dd-c30cb616a8f0 service nova] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Detach interface failed, port_id=bcd6f07a-19fb-4e85-b080-d747bcddbeb5, reason: Instance 79afdeda-8a95-4ad4-ba10-0424cedf1d6f could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2197.794546] env[62875]: DEBUG nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2197.805771] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2197.980790] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4549296-0abc-4abb-90e1-805524dae859 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2197.988560] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f79e7c4-0236-4e83-965d-d9b5d48a1002 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2198.018768] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9848171-965f-462a-a0ab-493499a7d552 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2198.026029] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1f1925-9ad9-45a3-b30f-9f50d13fe7fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2198.039446] env[62875]: DEBUG nova.compute.provider_tree [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2198.076700] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2198.078246] env[62875]: INFO nova.compute.manager [-] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Took 1.28 seconds to deallocate network for instance.
[ 2198.542653] env[62875]: DEBUG nova.scheduler.client.report [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2198.587874] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2198.807940] env[62875]: DEBUG nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2198.833936] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2198.834356] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2198.834595] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2198.834802] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2198.834993] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2198.835195] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2198.835448] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2198.835651] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2198.835863] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2198.836081] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2198.836302] env[62875]: DEBUG nova.virt.hardware [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2198.837233] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe67c22-b626-44fc-8969-c7bbcc9c5710 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2198.844965] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfe80fd-11a2-42be-9a8d-7de033fbd3a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2199.048328] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2199.048908] env[62875]: DEBUG nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2199.051716] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.522s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2199.052181] env[62875]: DEBUG nova.objects.instance [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lazy-loading 'resources' on Instance uuid cb4941dc-1690-46b5-93f9-407198fc1332 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2199.555296] env[62875]: DEBUG nova.compute.utils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2199.556761] env[62875]: DEBUG nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2199.556943] env[62875]: DEBUG nova.network.neutron [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2199.604395] env[62875]: DEBUG nova.policy [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e79993abf5eb47cc8449e3468d3cdd4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bef7d358bb2746efb448dbf759cac58c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 2199.741579] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc0a267-5e26-4bc7-b263-db2f79bb1815 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2199.749405] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd686b3-ca88-41be-875d-398a1e31e622 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2199.778530] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabb1b14-6e70-42c3-afd3-8ab912b5d2bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2199.785749] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897f3de5-929d-4a4f-985f-f8f1bfa1b2c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2199.800261] env[62875]: DEBUG nova.compute.provider_tree [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2199.927656] env[62875]: DEBUG nova.network.neutron [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Successfully created port: c2a66c75-6cc6-4364-b6f7-c6ad771d208b {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2200.062407] env[62875]: DEBUG nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2200.304020] env[62875]: DEBUG nova.scheduler.client.report [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2200.555404] env[62875]: DEBUG nova.compute.manager [req-a72e11a9-22ba-49e0-885c-1a38f6b9af7d req-21a045f5-add3-4fc5-8fe2-fb38282fb64d service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Received event network-vif-plugged-acf0849a-ab7f-4949-951d-8268baee804c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2200.555631] env[62875]: DEBUG oslo_concurrency.lockutils [req-a72e11a9-22ba-49e0-885c-1a38f6b9af7d req-21a045f5-add3-4fc5-8fe2-fb38282fb64d service nova] Acquiring lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2200.555961] env[62875]: DEBUG oslo_concurrency.lockutils [req-a72e11a9-22ba-49e0-885c-1a38f6b9af7d req-21a045f5-add3-4fc5-8fe2-fb38282fb64d service nova] Lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2200.555961] env[62875]: DEBUG oslo_concurrency.lockutils [req-a72e11a9-22ba-49e0-885c-1a38f6b9af7d req-21a045f5-add3-4fc5-8fe2-fb38282fb64d service nova] Lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2200.556151] env[62875]: DEBUG nova.compute.manager [req-a72e11a9-22ba-49e0-885c-1a38f6b9af7d req-21a045f5-add3-4fc5-8fe2-fb38282fb64d service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] No waiting events found dispatching network-vif-plugged-acf0849a-ab7f-4949-951d-8268baee804c {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2200.556272] env[62875]: WARNING nova.compute.manager [req-a72e11a9-22ba-49e0-885c-1a38f6b9af7d req-21a045f5-add3-4fc5-8fe2-fb38282fb64d service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Received unexpected event network-vif-plugged-acf0849a-ab7f-4949-951d-8268baee804c for instance with vm_state building and task_state spawning.
[ 2200.689818] env[62875]: DEBUG nova.network.neutron [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Successfully updated port: acf0849a-ab7f-4949-951d-8268baee804c {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2200.808884] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2200.811298] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.217s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2200.811579] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2200.813365] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.008s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2200.813706] env[62875]: DEBUG nova.objects.instance [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lazy-loading 'resources' on Instance uuid 346f4371-3029-4710-9163-08cf36196207 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2200.832988] env[62875]: INFO nova.scheduler.client.report [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted allocations for instance cb4941dc-1690-46b5-93f9-407198fc1332
[ 2200.834591] env[62875]: INFO nova.scheduler.client.report [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted allocations for instance 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9
[ 2201.074301] env[62875]: DEBUG nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2201.100160] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2201.100406] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2201.100582] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2201.100768] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2201.100915] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2201.101073] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2201.101285] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2201.101444] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2201.101640] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2201.101812] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2201.101985] env[62875]: DEBUG nova.virt.hardware [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2201.102861] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b30a603-0d43-4b2f-bfd7-1774ff44951c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2201.111142] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18c4e1f-0fa7-420e-9f7e-393bebf5b5a6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2201.194475] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2201.194643] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2201.194802] env[62875]: DEBUG nova.network.neutron [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2201.306840] env[62875]: DEBUG nova.compute.manager [req-6b5ce399-d4ab-410d-b474-42e39a03fa0f req-7c1d1e1d-7e84-401c-a4a8-61eb1348ed1b service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Received event network-vif-plugged-c2a66c75-6cc6-4364-b6f7-c6ad771d208b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2201.307170] env[62875]: DEBUG oslo_concurrency.lockutils [req-6b5ce399-d4ab-410d-b474-42e39a03fa0f req-7c1d1e1d-7e84-401c-a4a8-61eb1348ed1b service nova] Acquiring lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2201.307401] env[62875]: DEBUG oslo_concurrency.lockutils [req-6b5ce399-d4ab-410d-b474-42e39a03fa0f req-7c1d1e1d-7e84-401c-a4a8-61eb1348ed1b service nova] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2201.307578] env[62875]: DEBUG oslo_concurrency.lockutils [req-6b5ce399-d4ab-410d-b474-42e39a03fa0f req-7c1d1e1d-7e84-401c-a4a8-61eb1348ed1b service nova] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2201.307754] env[62875]: DEBUG nova.compute.manager [req-6b5ce399-d4ab-410d-b474-42e39a03fa0f req-7c1d1e1d-7e84-401c-a4a8-61eb1348ed1b service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] No waiting events found dispatching network-vif-plugged-c2a66c75-6cc6-4364-b6f7-c6ad771d208b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2201.307923] env[62875]: WARNING nova.compute.manager [req-6b5ce399-d4ab-410d-b474-42e39a03fa0f req-7c1d1e1d-7e84-401c-a4a8-61eb1348ed1b service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Received unexpected event network-vif-plugged-c2a66c75-6cc6-4364-b6f7-c6ad771d208b for instance with vm_state building and task_state spawning.
[ 2201.345602] env[62875]: DEBUG oslo_concurrency.lockutils [None req-362cf1fb-cbb0-416f-8dc3-0c517886a579 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "cb4941dc-1690-46b5-93f9-407198fc1332" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.315s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2201.350440] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0284ce25-c088-4299-9ffd-0eba746fee2c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "85f7c7dc-03c4-44ff-8502-cf61ee7c3af9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.341s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2201.422995] env[62875]: DEBUG nova.network.neutron [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Successfully updated port: c2a66c75-6cc6-4364-b6f7-c6ad771d208b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2201.496748] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf4b41b-c82d-48e7-8459-8298c8400554 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2201.504968] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f29229-57e3-42df-bc64-5df12efdb4f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2201.535789] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f132e908-2209-45e9-a7fe-8b5817f86199 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2201.543635] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ca4350-b816-4d92-9818-5e786a7285de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2201.557753] env[62875]: DEBUG nova.compute.provider_tree [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2201.727800] env[62875]: DEBUG nova.network.neutron [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2201.864181] env[62875]: DEBUG nova.network.neutron [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance_info_cache with network_info: [{"id": "acf0849a-ab7f-4949-951d-8268baee804c", "address": "fa:16:3e:29:a5:31", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf0849a-ab", "ovs_interfaceid": "acf0849a-ab7f-4949-951d-8268baee804c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2201.928719] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "refresh_cache-7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2201.928880] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "refresh_cache-7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2201.929049] env[62875]: DEBUG nova.network.neutron [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2201.939520] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2201.939937] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2201.940250] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2201.940465] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2201.940644] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2201.944443] env[62875]: INFO nova.compute.manager [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Terminating instance
[ 2202.060790] env[62875]: DEBUG nova.scheduler.client.report [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2202.366799] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2202.367298] env[62875]: DEBUG nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Instance network_info: |[{"id": "acf0849a-ab7f-4949-951d-8268baee804c", "address": "fa:16:3e:29:a5:31", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf0849a-ab", "ovs_interfaceid": "acf0849a-ab7f-4949-951d-8268baee804c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2202.367838] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:a5:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acf0849a-ab7f-4949-951d-8268baee804c', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2202.376301] env[62875]: DEBUG oslo.service.loopingcall [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2202.376603] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2202.376908] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-149ea1bd-9882-4e4a-b7ff-7a9899bed093 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2202.398813] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2202.398813] env[62875]: value = "task-2180430"
[ 2202.398813] env[62875]: _type = "Task"
[ 2202.398813] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2202.408110] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180430, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2202.447663] env[62875]: DEBUG nova.compute.manager [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2202.448826] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2202.448944] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e7465e-0468-4fa4-9521-6bbd3c3a7ae4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2202.456677] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 2202.456921] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fb23a5a-d39a-4e75-85ae-71f39039a831 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2202.461114] env[62875]: DEBUG nova.network.neutron [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2202.464761] env[62875]: DEBUG oslo_vmware.api [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2202.464761] env[62875]: value = "task-2180431"
[ 2202.464761] env[62875]: _type = "Task"
[ 2202.464761] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2202.473308] env[62875]: DEBUG oslo_vmware.api [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180431, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.569140] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.571991] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.495s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.571991] env[62875]: DEBUG nova.objects.instance [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'resources' on Instance uuid 37493633-c100-44d8-b1a1-8d462733ba41 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2202.583222] env[62875]: DEBUG nova.compute.manager [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Received event network-changed-acf0849a-ab7f-4949-951d-8268baee804c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2202.583464] env[62875]: DEBUG nova.compute.manager [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Refreshing instance network info cache due to event network-changed-acf0849a-ab7f-4949-951d-8268baee804c. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2202.583714] env[62875]: DEBUG oslo_concurrency.lockutils [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] Acquiring lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.583900] env[62875]: DEBUG oslo_concurrency.lockutils [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] Acquired lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.584085] env[62875]: DEBUG nova.network.neutron [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Refreshing network info cache for port acf0849a-ab7f-4949-951d-8268baee804c {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2202.593724] env[62875]: INFO nova.scheduler.client.report [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted allocations for instance 346f4371-3029-4710-9163-08cf36196207 [ 2202.613679] env[62875]: DEBUG nova.network.neutron [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Updating instance_info_cache with network_info: [{"id": "c2a66c75-6cc6-4364-b6f7-c6ad771d208b", "address": "fa:16:3e:4b:2b:17", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2a66c75-6c", "ovs_interfaceid": "c2a66c75-6cc6-4364-b6f7-c6ad771d208b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2202.910118] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180430, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.975080] env[62875]: DEBUG oslo_vmware.api [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180431, 'name': PowerOffVM_Task, 'duration_secs': 0.228072} completed successfully. 
[ 2202.975310] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2202.975545] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2202.975861] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6efeb7e5-9d3f-448b-9e3d-9349127aad79 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.094129] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2203.094387] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2203.094574] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleting the datastore file [datastore2] d40aaba6-020d-45b9-83e7-8d7fe382b20f {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2203.097129] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53f10137-ff57-45a6-94fd-79637cf663d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.101945] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e6edada2-648e-4c33-aca3-f7f17f44a7fa tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "346f4371-3029-4710-9163-08cf36196207" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.709s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2203.105945] env[62875]: DEBUG oslo_vmware.api [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2203.105945] env[62875]: value = "task-2180433"
[ 2203.105945] env[62875]: _type = "Task"
[ 2203.105945] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2203.117752] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "refresh_cache-7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2203.118227] env[62875]: DEBUG nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Instance network_info: |[{"id": "c2a66c75-6cc6-4364-b6f7-c6ad771d208b", "address": "fa:16:3e:4b:2b:17", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2a66c75-6c", "ovs_interfaceid": "c2a66c75-6cc6-4364-b6f7-c6ad771d208b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2203.118368] env[62875]: DEBUG oslo_vmware.api [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180433, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2203.120962] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:2b:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2a66c75-6cc6-4364-b6f7-c6ad771d208b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2203.128021] env[62875]: DEBUG oslo.service.loopingcall [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2203.130738] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2203.131363] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b996aead-1ba1-4be7-9b83-adfdb05d2db1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.153628] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2203.153628] env[62875]: value = "task-2180434"
[ 2203.153628] env[62875]: _type = "Task"
[ 2203.153628] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2203.164676] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180434, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2203.241894] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3151d2c-0161-4d7f-aeda-9a74ecef3800 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.251483] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f971a985-3c82-468b-833e-e6356a3606d1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.282435] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1750fe5e-0f13-4662-bfeb-84d13294734e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.289745] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4d7581-4e06-442e-96c5-a7e4b874b947 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.302228] env[62875]: DEBUG nova.compute.provider_tree [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2203.317695] env[62875]: DEBUG nova.network.neutron [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updated VIF entry in instance network info cache for port acf0849a-ab7f-4949-951d-8268baee804c. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 2203.317765] env[62875]: DEBUG nova.network.neutron [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance_info_cache with network_info: [{"id": "acf0849a-ab7f-4949-951d-8268baee804c", "address": "fa:16:3e:29:a5:31", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf0849a-ab", "ovs_interfaceid": "acf0849a-ab7f-4949-951d-8268baee804c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2203.330042] env[62875]: DEBUG nova.compute.manager [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Received event network-changed-c2a66c75-6cc6-4364-b6f7-c6ad771d208b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2203.330229] env[62875]: DEBUG nova.compute.manager [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Refreshing instance network info cache due to event network-changed-c2a66c75-6cc6-4364-b6f7-c6ad771d208b. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2203.330441] env[62875]: DEBUG oslo_concurrency.lockutils [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] Acquiring lock "refresh_cache-7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2203.330582] env[62875]: DEBUG oslo_concurrency.lockutils [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] Acquired lock "refresh_cache-7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2203.330738] env[62875]: DEBUG nova.network.neutron [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Refreshing network info cache for port c2a66c75-6cc6-4364-b6f7-c6ad771d208b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2203.412458] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180430, 'name': CreateVM_Task, 'duration_secs': 0.721117} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2203.412660] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 2203.413361] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2203.413532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2203.413872] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2203.414151] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1df6e14-4670-44e9-85e7-e28efe047c2f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.418670] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2203.418670] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ffc5ec-cb8c-1dcb-3693-c142602a6f4f"
[ 2203.418670] env[62875]: _type = "Task"
[ 2203.418670] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2203.428082] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ffc5ec-cb8c-1dcb-3693-c142602a6f4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2203.615799] env[62875]: DEBUG oslo_vmware.api [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140264} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2203.616241] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2203.616241] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2203.616421] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2203.616595] env[62875]: INFO nova.compute.manager [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Took 1.17 seconds to destroy the instance on the hypervisor.
[ 2203.616836] env[62875]: DEBUG oslo.service.loopingcall [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2203.617034] env[62875]: DEBUG nova.compute.manager [-] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2203.617130] env[62875]: DEBUG nova.network.neutron [-] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2203.663461] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180434, 'name': CreateVM_Task, 'duration_secs': 0.401179} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2203.663605] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 2203.664206] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2203.808069] env[62875]: DEBUG nova.scheduler.client.report [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2203.821228] env[62875]: DEBUG oslo_concurrency.lockutils [req-f6bebb69-2f1a-4368-a047-c08be61ed767 req-188c154f-b891-4acc-ab68-fb3ee1265ecb service nova] Releasing lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2203.931108] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ffc5ec-cb8c-1dcb-3693-c142602a6f4f, 'name': SearchDatastore_Task, 'duration_secs': 0.00993} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2203.931408] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2203.931664] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2203.931898] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2203.932127] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2203.932236] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2203.932509] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2203.932838] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2203.933067] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b91410e4-84aa-46a2-8e88-49e3e55dbd56 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.934923] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5168522-8436-4a28-b03a-dd31d4e9c112 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.939942] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2203.939942] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520932df-f624-d973-69ee-5c2bb7bc64de"
[ 2203.939942] env[62875]: _type = "Task"
[ 2203.939942] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2203.943977] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2203.944171] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 2203.947155] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfee512a-c564-4b39-9606-7f3cffb555be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2203.952885] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520932df-f624-d973-69ee-5c2bb7bc64de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2203.955744] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2203.955744] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cce3bf-d63e-84cd-314e-d2319a5d1fd0"
[ 2203.955744] env[62875]: _type = "Task"
[ 2203.955744] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2203.963467] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cce3bf-d63e-84cd-314e-d2319a5d1fd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2204.079708] env[62875]: DEBUG nova.network.neutron [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Updated VIF entry in instance network info cache for port c2a66c75-6cc6-4364-b6f7-c6ad771d208b. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 2204.080144] env[62875]: DEBUG nova.network.neutron [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Updating instance_info_cache with network_info: [{"id": "c2a66c75-6cc6-4364-b6f7-c6ad771d208b", "address": "fa:16:3e:4b:2b:17", "network": {"id": "789ebcad-b019-457d-b13a-0a16082839e3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-200337265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef7d358bb2746efb448dbf759cac58c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2a66c75-6c", "ovs_interfaceid": "c2a66c75-6cc6-4364-b6f7-c6ad771d208b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2204.314921] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2204.317680] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.730s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2204.317917] env[62875]: DEBUG nova.objects.instance [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lazy-loading 'resources' on Instance uuid 79afdeda-8a95-4ad4-ba10-0424cedf1d6f {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2204.332661] env[62875]: INFO nova.scheduler.client.report [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted allocations for instance 37493633-c100-44d8-b1a1-8d462733ba41
[ 2204.408226] env[62875]: DEBUG nova.network.neutron [-] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2204.450225] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520932df-f624-d973-69ee-5c2bb7bc64de, 'name': SearchDatastore_Task, 'duration_secs': 0.019461} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2204.450519] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2204.450753] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2204.450958] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2204.464790] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cce3bf-d63e-84cd-314e-d2319a5d1fd0, 'name': SearchDatastore_Task, 'duration_secs': 0.009642} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2204.465505] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5fdc0cd-e923-45aa-8744-ab877cd0f385 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2204.470114] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2204.470114] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c4386d-4ab9-5647-01c7-6d71addff9ed"
[ 2204.470114] env[62875]: _type = "Task"
[ 2204.470114] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2204.477166] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c4386d-4ab9-5647-01c7-6d71addff9ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2204.582521] env[62875]: DEBUG oslo_concurrency.lockutils [req-4e91ea04-ef7f-4691-9889-0a0667ddeda0 req-cf2635ba-e92b-41e6-b846-d153e5407be1 service nova] Releasing lock "refresh_cache-7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2204.843087] env[62875]: DEBUG oslo_concurrency.lockutils [None req-66f57e41-1fdf-4a1b-8a82-35f14ad40739 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "37493633-c100-44d8-b1a1-8d462733ba41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.694s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2204.910999] env[62875]: INFO nova.compute.manager [-] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Took 1.29 seconds to deallocate network for instance.
[ 2204.949807] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137515c5-a981-4ac6-9fa3-efc77ed328df {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2204.957991] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647681d2-4762-4b17-ba78-8a78e9f2c0c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2204.992356] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b292a31-b9e9-4f96-b2ef-7a033f2280f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2205.002778] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c4386d-4ab9-5647-01c7-6d71addff9ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009002} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2205.003972] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d35845-db90-47f4-9bd4-eeb6e4f6f09d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2205.007717] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2205.007989] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 2a16938e-eeaa-430d-961b-4b060187ba99/2a16938e-eeaa-430d-961b-4b060187ba99.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 2205.008274] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2205.008468] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2205.008659] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2de8d2b6-b42a-42b2-a03e-10172a3067c6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2205.010771] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e9c058b-2dc0-495b-8e44-0f16a530c818 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2205.020759] env[62875]: DEBUG nova.compute.provider_tree [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2205.023930] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2205.023930] env[62875]: value = "task-2180435"
[ 2205.023930] env[62875]: _type = "Task"
[ 2205.023930] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2205.027813] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2205.028041] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 2205.028964] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22593ff9-fd09-4501-b108-04dc333af7c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2205.034860] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180435, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2205.037806] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2205.037806] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a1e7a8-0b75-3f15-a325-a14940e5c33c"
[ 2205.037806] env[62875]: _type = "Task"
[ 2205.037806] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2205.044856] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a1e7a8-0b75-3f15-a325-a14940e5c33c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2205.356098] env[62875]: DEBUG nova.compute.manager [req-15c9cfd8-5345-4a37-b3ed-a66c87d78ee1 req-614d416e-42e5-4965-acef-2f5ac31b0870 service nova] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Received event network-vif-deleted-b79962c5-9f95-4d9b-ae67-11445b571d91 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2205.417337] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2205.445637] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2205.446093] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2205.524197] env[62875]: DEBUG nova.scheduler.client.report [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2205.536414] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180435, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481234} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2205.536656] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 2a16938e-eeaa-430d-961b-4b060187ba99/2a16938e-eeaa-430d-961b-4b060187ba99.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 2205.536869] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 2205.537120] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa43a7e9-721a-4b4b-a78d-1a42ea28488c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2205.548764] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a1e7a8-0b75-3f15-a325-a14940e5c33c, 'name': SearchDatastore_Task, 'duration_secs': 0.008362} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2205.550609] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2205.550609] env[62875]: value = "task-2180436"
[ 2205.550609] env[62875]: _type = "Task"
[ 2205.550609] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2205.550788] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea77fad2-c6a9-442b-90e7-1a8750c9c8d9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2205.559114] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2205.559114] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5219f3d3-6f66-9952-ff65-8b64ef2d316e"
[ 2205.559114] env[62875]: _type = "Task"
[ 2205.559114] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2205.561961] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180436, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2205.569361] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5219f3d3-6f66-9952-ff65-8b64ef2d316e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2205.949171] env[62875]: DEBUG nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}}
[ 2206.029464] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2206.031817] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.615s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2206.032063] env[62875]: DEBUG nova.objects.instance [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lazy-loading 'resources' on Instance uuid d40aaba6-020d-45b9-83e7-8d7fe382b20f {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2206.052582] env[62875]: INFO nova.scheduler.client.report [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleted allocations for instance 79afdeda-8a95-4ad4-ba10-0424cedf1d6f
[ 2206.064732] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06263} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2206.067961] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 2206.068701] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e89b44e-c397-4e0b-af16-33a89d47d70b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2206.076384] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5219f3d3-6f66-9952-ff65-8b64ef2d316e, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2206.085525] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2206.085812] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513/7b3b22c7-26e2-46bf-82b4-8a2b1e68d513.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 2206.094928] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 2a16938e-eeaa-430d-961b-4b060187ba99/2a16938e-eeaa-430d-961b-4b060187ba99.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 2206.095211] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8153e12-0ef0-47d0-8748-597d236c8362 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2206.097368] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5bd564f-839d-4a22-8ad8-d46dcbd86450 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2206.117379] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2206.117379] env[62875]: value = "task-2180437"
[ 2206.117379] env[62875]: _type = "Task"
[ 2206.117379] env[62875]: } to 
complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.121536] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2206.121536] env[62875]: value = "task-2180438" [ 2206.121536] env[62875]: _type = "Task" [ 2206.121536] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.127834] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.133509] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180438, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.471263] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.563304] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3cf8761d-a0b8-401a-9118-4f490c058806 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "79afdeda-8a95-4ad4-ba10-0424cedf1d6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.430s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.628081] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180437, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460401} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.630792] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513/7b3b22c7-26e2-46bf-82b4-8a2b1e68d513.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2206.631055] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2206.631451] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f188430-68bb-4ca5-9374-cc72c3ed2fe7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.635953] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180438, 'name': ReconfigVM_Task, 'duration_secs': 0.371013} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.636564] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 2a16938e-eeaa-430d-961b-4b060187ba99/2a16938e-eeaa-430d-961b-4b060187ba99.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2206.637178] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f7c2c4d-0edc-40ef-82b8-8c3a1d15a536 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.643657] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2206.643657] env[62875]: value = "task-2180439" [ 2206.643657] env[62875]: _type = "Task" [ 2206.643657] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.644044] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2206.644044] env[62875]: value = "task-2180440" [ 2206.644044] env[62875]: _type = "Task" [ 2206.644044] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.655025] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.659931] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180440, 'name': Rename_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.671192] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d840a2e5-5a81-451f-84de-7a344399833a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.678578] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1780bc5c-d91f-4529-aeb9-ed62a43e7d85 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.712570] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ccfb47-f709-48de-8280-c1a5693f03b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.724378] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ee03e0-880e-4cdc-a74d-abaa2dfbfb6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.739027] env[62875]: DEBUG nova.compute.provider_tree [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2207.156797] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062651} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.159746] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2207.160051] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180440, 'name': Rename_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.160718] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084d6c04-6159-4144-98e3-f478200056c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.181764] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513/7b3b22c7-26e2-46bf-82b4-8a2b1e68d513.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2207.181996] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3be737a-f882-44a1-b1fa-e7424305964f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.201022] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2207.201022] env[62875]: value = "task-2180441" [ 2207.201022] env[62875]: _type = "Task" [ 2207.201022] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.207754] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180441, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.219277] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "2dd748c2-048d-4450-a393-995249a9deb8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2207.219526] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2207.219756] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "2dd748c2-048d-4450-a393-995249a9deb8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2207.219949] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2207.220134] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.222096] env[62875]: INFO nova.compute.manager [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Terminating instance [ 2207.242881] env[62875]: DEBUG nova.scheduler.client.report [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2207.658300] env[62875]: DEBUG oslo_vmware.api [None 
req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180440, 'name': Rename_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.711394] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180441, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.725171] env[62875]: DEBUG nova.compute.manager [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2207.725368] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2207.726125] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd66bd9-9b71-4d3b-aaaf-98df3a84125a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.732823] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2207.733123] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48a630e3-92e6-43df-af1a-484f720097d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.738586] env[62875]: DEBUG oslo_vmware.api [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){ [ 2207.738586] env[62875]: value = "task-2180442" [ 2207.738586] env[62875]: _type = "Task" [ 2207.738586] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.745861] env[62875]: DEBUG oslo_vmware.api [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180442, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.746527] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.748622] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.278s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2207.750160] env[62875]: INFO nova.compute.claims [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2207.770685] env[62875]: INFO nova.scheduler.client.report [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted allocations for instance d40aaba6-020d-45b9-83e7-8d7fe382b20f [ 2208.158870] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180440, 'name': Rename_Task, 'duration_secs': 1.149048} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.159168] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2208.159410] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38cd6316-4809-467d-af42-4dd1d35b1d8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.165149] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2208.165149] env[62875]: value = "task-2180443" [ 2208.165149] env[62875]: _type = "Task" [ 2208.165149] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.172372] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180443, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.210370] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180441, 'name': ReconfigVM_Task, 'duration_secs': 0.840584} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.210489] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513/7b3b22c7-26e2-46bf-82b4-8a2b1e68d513.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2208.211060] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1cd86f73-d1a5-481e-a582-e161621b7283 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.216610] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2208.216610] env[62875]: value = "task-2180444" [ 2208.216610] env[62875]: _type = "Task" [ 2208.216610] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.224431] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180444, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.247379] env[62875]: DEBUG oslo_vmware.api [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180442, 'name': PowerOffVM_Task, 'duration_secs': 0.186751} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.247650] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2208.247821] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2208.248073] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-653f1c6e-1988-419d-a952-160e797cd4c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.277836] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f1b5b90a-1a01-4026-94a2-f2d6bcaa4dbc tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d40aaba6-020d-45b9-83e7-8d7fe382b20f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.338s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.674953] env[62875]: DEBUG oslo_vmware.api [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180443, 'name': PowerOnVM_Task, 'duration_secs': 0.419077} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.675480] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2208.675809] env[62875]: INFO nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Took 9.87 seconds to spawn the instance on the hypervisor. [ 2208.676152] env[62875]: DEBUG nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2208.677048] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19531ca1-bf24-41c1-a07d-0f803bd1236f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.727719] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180444, 'name': Rename_Task, 'duration_secs': 0.130656} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.728018] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2208.728278] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23a612a3-754d-4805-92a0-6b06abcdfc99 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.734281] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2208.734281] env[62875]: value = "task-2180446" [ 2208.734281] env[62875]: _type = "Task" [ 2208.734281] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.742590] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180446, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.886493] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fca291-321d-453a-83d5-4c2dba44ab12 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.894365] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fdc380-9c4e-4a90-be47-4bab638719c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.925031] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65edaff-8696-4eb6-a3ab-10041b14846f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.932258] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f7d1ca-410c-467d-9be3-430c9da9f6a9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.946134] env[62875]: DEBUG nova.compute.provider_tree [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2209.046091] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "d751c6df-1e27-4b6a-a88a-cd15456914a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.046345] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 
tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.195048] env[62875]: INFO nova.compute.manager [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Took 14.73 seconds to build instance. [ 2209.245161] env[62875]: DEBUG oslo_vmware.api [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180446, 'name': PowerOnVM_Task, 'duration_secs': 0.451137} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.245363] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2209.245565] env[62875]: INFO nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Took 8.17 seconds to spawn the instance on the hypervisor. [ 2209.245746] env[62875]: DEBUG nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2209.246557] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db24de4-c624-45d3-a186-a8918e74add1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.451071] env[62875]: DEBUG nova.scheduler.client.report [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2209.549022] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2209.696700] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5fb8657a-7eb3-47f2-a079-7e2497ebd2a7 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.244s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.764681] env[62875]: INFO nova.compute.manager [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Took 13.89 seconds to build instance. [ 2209.955280] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.955882] env[62875]: DEBUG nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2210.067218] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.067504] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.068971] env[62875]: INFO nova.compute.claims [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2210.269638] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7a594591-1b83-4b63-a626-be1c2df41b66 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.406s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.460973] env[62875]: DEBUG nova.compute.utils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 
2210.462470] env[62875]: DEBUG nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2210.462655] env[62875]: DEBUG nova.network.neutron [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2210.499699] env[62875]: DEBUG nova.policy [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52db0a44319f46939b47247136267ceb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5062c761ea34842a2f6179ae76f3465', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2210.768048] env[62875]: DEBUG nova.network.neutron [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Successfully created port: de2d9317-1076-42cf-8bfa-ccba42959961 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2210.967299] env[62875]: DEBUG nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2210.998201] env[62875]: DEBUG nova.compute.manager [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2210.999198] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a910d9b-a3c2-42b2-a4f9-f147bace5488 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.075035] env[62875]: DEBUG nova.compute.manager [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Stashing vm_state: active {{(pid=62875) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2211.206952] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cdc6fc-72d4-46d1-b5a8-9c1c58caea14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.214607] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbd43d0-f4d2-4132-8c8d-44979930c541 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.243597] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a47fcc-2daf-4104-b355-60cbf67ac4a8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.251161] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23268cc8-71b6-4e25-9b4f-d40bb5f966c0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.265655] env[62875]: DEBUG nova.compute.provider_tree [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2211.511135] env[62875]: INFO nova.compute.manager [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] instance snapshotting [ 2211.513810] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95eb931-5527-4155-9d12-058e01b25457 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.532699] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e68c30-6fdb-4855-a58f-7cdf6865a639 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.595264] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.769460] env[62875]: DEBUG nova.scheduler.client.report [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2211.976840] env[62875]: DEBUG nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2212.002253] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2212.002523] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2212.002985] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2212.003330] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2212.003560] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 2212.003754] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2212.004062] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2212.004246] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2212.004423] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2212.004591] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2212.004768] env[62875]: DEBUG nova.virt.hardware [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2212.005643] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc6be6d-78ca-46a5-9407-9afdc52de6eb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.014172] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b549d4b4-a240-4d3a-890b-065fd9ab9583 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.043177] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2212.043814] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a329d886-bbdc-4c73-a5bf-899ba0524b08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.051193] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: 
(returnval){ [ 2212.051193] env[62875]: value = "task-2180447" [ 2212.051193] env[62875]: _type = "Task" [ 2212.051193] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.059164] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180447, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.274435] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2212.275022] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2212.277648] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.682s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2212.560985] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180447, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.780790] env[62875]: DEBUG nova.compute.utils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2212.784290] env[62875]: INFO nova.compute.claims [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2212.787748] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Allocating IP information in the background. 
[ 2212.274435] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2212.275022] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2212.277648] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.682s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2212.560985] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180447, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2212.780790] env[62875]: DEBUG nova.compute.utils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2212.784290] env[62875]: INFO nova.compute.claims [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2212.787748] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2212.787927] env[62875]: DEBUG nova.network.neutron [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2212.826837] env[62875]: DEBUG nova.policy [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d0e175791341aea0db00ef8a1b5680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '226340868e7446cca12688a32d13c630', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
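
The nova.policy record is a soft authorization check: allocate_for_instance asks whether this request context may attach an external network, the member/reader roles don't satisfy the rule, and Nova proceeds with a project network rather than failing the boot. The general oslo.policy shape of such a check, sketched with an illustrative rule and target (not Nova's registered defaults):

    # Rough shape of an oslo.policy check like the one logged above; the
    # enforcer setup, default rule, and target here are illustrative only.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '226340868e7446cca12688a32d13c630'}
    target = {'project_id': creds['project_id']}

    # do_raise=False mirrors the soft check: a False result is logged, not raised.
    print(enforcer.authorize('network:attach_external_network', target, creds,
                             do_raise=False))   # False for a member/reader context
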
[ 2213.064356] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180447, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2213.099894] env[62875]: DEBUG nova.network.neutron [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Successfully created port: b3066f71-cb70-4af1-bab0-ad595fb59fd9 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2213.288575] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2213.293660] env[62875]: INFO nova.compute.resource_tracker [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating resource usage from migration 3964c284-0811-449d-8064-51072de6a67a
[ 2213.428655] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bb7007-35d2-499b-9060-293b3b702dca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2213.436331] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1a3c24-8400-485d-ac94-a7bf9a790797 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2213.468782] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3538da0c-0dc8-4a03-88eb-4dc88b42a131 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2213.476324] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f81854-8f1f-40f0-970d-8823884fe001 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2213.489698] env[62875]: DEBUG nova.compute.provider_tree [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2213.563647] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180447, 'name': CreateSnapshot_Task, 'duration_secs': 1.330057} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2213.564013] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}}
[ 2213.564925] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db36bd1-9f46-4d72-a92b-38995dc9237e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2213.993338] env[62875]: DEBUG nova.scheduler.client.report [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
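
The inventory dict in the set_inventory_for_provider record is what Placement schedules against: per resource class, effective capacity is (total - reserved) * allocation_ratio, with per-allocation bounds from min_unit/max_unit/step_size. Checking the logged values:

    # Effective capacity implied by the logged inventory (standard Placement formula).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0 (48 host vCPUs oversubscribed 4x), MEMORY_MB 196078.0, DISK_GB 400.0

Note that max_unit still caps any single allocation (16 VCPU, 65530 MB, 174 GB here) regardless of the oversubscribed total.
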
[ 2214.081634] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}}
[ 2214.081953] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-50ad4693-1462-4d52-b5ac-c4d98d4620fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2214.091499] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2214.091499] env[62875]: value = "task-2180448"
[ 2214.091499] env[62875]: _type = "Task"
[ 2214.091499] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2214.099739] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2214.301925] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2214.328985] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2214.329466] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2214.329782] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2214.330114] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2214.330408] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2214.330685] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2214.332058] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2214.332058] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2214.332058] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2214.332058] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2214.332058] env[62875]: DEBUG nova.virt.hardware [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2214.332638] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862fb92b-7af7-4df6-8511-23594f6cbe3d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2214.340670] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca30452-35e5-43d4-8dd3-0de79c455fc9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2214.498951] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.221s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2214.499123] env[62875]: INFO nova.compute.manager [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Migrating
[ 2214.601996] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2214.632522] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2214.632850] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2214.633007] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 2215.014183] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2215.014346] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2215.014525] env[62875]: DEBUG nova.network.neutron [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2215.102664] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2215.605982] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2215.637972] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 2215.638212] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2215.638450] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2215.638642] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2215.638833] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2215.639057] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2215.639340] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2215.639480] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 2215.639702] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
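
This run of ComputeManager._* entries is one sweep of Nova's periodic tasks, driven by oslo.service: each decorated method fires on its own spacing, and tasks such as _reclaim_queued_deletes bail out early when their configuration (here reclaim_instance_interval <= 0) disables them. A minimal sketch of the mechanism, with an illustrative manager class and spacings (not Nova's):

    # Sketch of the oslo.service periodic-task machinery behind the lines above.
    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _poll_volume_usage(self, context):
            print('polling volume usage')

        @periodic_task.periodic_task(spacing=300, run_immediately=True)
        def _reclaim_queued_deletes(self, context):
            reclaim_interval = 0  # stands in for CONF.reclaim_instance_interval
            if reclaim_interval <= 0:
                print('CONF.reclaim_instance_interval <= 0, skipping...')

    Manager(cfg.CONF).run_periodic_tasks(context=None)
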
[ 2215.711288] env[62875]: DEBUG nova.network.neutron [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance_info_cache with network_info: [{"id": "acf0849a-ab7f-4949-951d-8268baee804c", "address": "fa:16:3e:29:a5:31", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf0849a-ab", "ovs_interfaceid": "acf0849a-ab7f-4949-951d-8268baee804c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2216.103509] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
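
The instance_info_cache entry above is Nova's serialized view of the instance's Neutron ports: a list of VIF dicts, each carrying its network, subnets, fixed and floating IPs, and the OVS binding details. Extracting the fixed IPs from such a structure is plain dict traversal; a small helper written against the fields visible in this very record (trimmed to the relevant keys):

    # Traversing a network_info cache entry like the one logged above.
    network_info = [{
        "id": "acf0849a-ab7f-4949-951d-8268baee804c",
        "address": "fa:16:3e:29:a5:31",
        "network": {
            "id": "0ed13604-b495-4533-ac72-260baf769762",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.14", "type": "fixed",
                         "version": 4, "floating_ips": []}],
            }],
        },
        "type": "ovs",
        "devname": "tapacf0849a-ab",
    }]

    def fixed_ips(nw_info):
        # Collect every fixed IP across all VIFs and all subnets.
        return [ip["address"]
                for vif in nw_info
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"]

    print(fixed_ips(network_info))  # ['192.168.128.14']
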
[ 2216.142760] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2216.143086] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2216.143269] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
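
The Acquiring/acquired/released triple above (waited 0.000s, held 0.000s) is oslo.concurrency's named-lock wrapper: everything in the resource tracker that touches compute_resources runs behind the same in-process lock, and the logged wait/held times come from the decorator's inner function. The pattern, roughly (function names illustrative):

    # Sketch of the oslo.concurrency lock usage producing the lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def clean_compute_node_cache():
        # Runs with the named lock held; concurrent callers queue up,
        # which is where the logged "waited N.NNNs" values come from.
        print('cache cleaned under lock')

    # The same named lock is also available as a context manager:
    with lockutils.lock('compute_resources'):
        print('claim bookkeeping under lock')

    clean_compute_node_cache()
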
[ 2216.143428] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2216.144299] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492deb47-8849-48a0-a60d-3e66c81108ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2216.152282] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c31f33-1c27-4d49-9edc-9bac0ecd6b93 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2216.166060] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eb25e4-32e0-4206-acfe-12705be03869 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2216.172697] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04e7369-7fed-4636-8957-9bc0575bf55b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2216.201233] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179909MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2216.201440] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2216.201614] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2216.214153] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2216.603814] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2217.106834] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2217.156976] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2217.157233] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2217.157449] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleting the datastore file [datastore2] 2dd748c2-048d-4450-a393-995249a9deb8 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2217.157809] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b94d3916-9be9-498e-ab18-ad23ed14ba33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2217.165743] env[62875]: DEBUG oslo_vmware.api [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for the task: (returnval){
[ 2217.165743] env[62875]: value = "task-2180449"
[ 2217.165743] env[62875]: _type = "Task"
[ 2217.165743] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2217.175597] env[62875]: DEBUG oslo_vmware.api [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180449, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2217.210554] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Applying migration context for instance 2a16938e-eeaa-430d-961b-4b060187ba99 as it has an incoming, in-progress migration 3964c284-0811-449d-8064-51072de6a67a. Migration status is migrating {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}}
[ 2217.212038] env[62875]: INFO nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating resource usage from migration 3964c284-0811-449d-8064-51072de6a67a
[ 2217.230305] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.230473] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9e0aaea6-96cf-494d-9f70-a709a47f9772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.230613] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.230738] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 70547fbd-7ce8-466e-8abc-b490b8dd6b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.230860] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 45403db3-ff20-42d3-8a37-8db671d8c1fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.230988] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2dd748c2-048d-4450-a393-995249a9deb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.231119] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.231246] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c4b43f9a-9c49-4281-a102-5d34f26cc9df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.231362] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d751c6df-1e27-4b6a-a88a-cd15456914a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.231501] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Migration 3964c284-0811-449d-8064-51072de6a67a is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}}
[ 2217.231600] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2a16938e-eeaa-430d-961b-4b060187ba99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2217.231814] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2217.231949] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
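
The "Final resource view" is just the sum of the allocations enumerated above plus the reserved memory from the inventory: nine instances and one in-flight migration at 192 MB each, the resizing instance at 256 MB, and 512 MB reserved give used_ram=2688MB; the same eleven 1-vCPU/1-GB claims give used_vcpus=11 and used_disk=11GB. Checked numerically:

    # Reconciling the Final resource view with the per-instance allocations above.
    claims_mb = [192] * 9 + [192] + [256]  # 9 instances + 1 migration + resize target
    reserved_mb = 512                      # from the MEMORY_MB inventory
    print(sum(claims_mb) + reserved_mb)    # 2688 -> used_ram=2688MB
    print(len(claims_mb))                  # 11   -> used_vcpus=11 (1 VCPU each)
    print(len(claims_mb) * 1)              # 11   -> used_disk=11GB (1 GB each)
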
[ 2217.358999] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fbaa72-2894-4ee0-8075-2c096b411cd0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2217.366502] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb70a9c0-cf1b-43be-b27e-8999a45c5aef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2217.398923] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398a3a2c-f377-495b-9000-7dd28b935816 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2217.407064] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295a31b8-5409-471c-ad05-1b1787e401e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2217.421452] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2217.605577] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task} progress is 95%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2217.677452] env[62875]: DEBUG oslo_vmware.api [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Task: {'id': task-2180449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148499} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2217.677721] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2217.677905] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2217.678149] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2217.678356] env[62875]: INFO nova.compute.manager [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Took 9.95 seconds to destroy the instance on the hypervisor.
[ 2217.678598] env[62875]: DEBUG oslo.service.loopingcall [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2217.678787] env[62875]: DEBUG nova.compute.manager [-] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2217.678881] env[62875]: DEBUG nova.network.neutron [-] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2217.728219] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24b4eb0-c163-4ffa-9e85-b91f5158f4bb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2217.749968] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance '2a16938e-eeaa-430d-961b-4b060187ba99' progress to 0 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 2217.924693] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2217.963527] env[62875]: DEBUG nova.compute.manager [req-b3a74a80-82e0-4caa-a7f1-a3d52d89f62d req-42aa76e3-4ce7-420b-927a-d27c419fe44a service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Received event network-vif-deleted-846fd804-bdbf-498c-a3f7-9741200ee2d4 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2217.963733] env[62875]: INFO nova.compute.manager [req-b3a74a80-82e0-4caa-a7f1-a3d52d89f62d req-42aa76e3-4ce7-420b-927a-d27c419fe44a service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Neutron deleted interface 846fd804-bdbf-498c-a3f7-9741200ee2d4; detaching it from the instance and deleting it from the info cache
[ 2217.963907] env[62875]: DEBUG nova.network.neutron [req-b3a74a80-82e0-4caa-a7f1-a3d52d89f62d req-42aa76e3-4ce7-420b-927a-d27c419fe44a service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2218.105867] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180448, 'name': CloneVM_Task, 'duration_secs': 3.985202} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2218.106159] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Created linked-clone VM from snapshot
[ 2218.106886] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f67485-5efb-4fc5-9a83-08e1bfa03e06 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.113988] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Uploading image e2918bee-ba5b-436b-9276-4c83d567218f {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}}
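
Tasks 2180447 through 2180452 together trace the VMware image-capture path for instance 7b3b22c7: snapshot the running VM, create a linked clone from the snapshot, stream-upload the clone's disk to Glance, then destroy the clone and delete the snapshot. Condensed into a sketch against an oslo.vmware-style session (morefs, specs, and the upload callable are placeholders; the real logic lives in nova/virt/vmwareapi/vmops.py):

    # Condensed shape of the capture flow traced by tasks 2180447-2180452.
    def capture_image(session, vm_ref, snapshot_ref, folder_ref, clone_spec, upload_fn):
        # 1. Point-in-time snapshot of the running VM (task-2180447).
        task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                                  name='nova-snap', memory=False, quiesce=True)
        session.wait_for_task(task)

        # 2. Linked clone from the snapshot; cheap because it shares backing
        #    disks (task-2180448, "Creating linked-clone VM from snapshot").
        task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                                  folder=folder_ref, name='clone', spec=clone_spec)
        clone_ref = session.wait_for_task(task).result

        # 3. Stream-optimized upload of the clone's disk to Glance
        #    ("Uploading image e2918bee-...").
        upload_fn(clone_ref)

        # 4. Tear down: destroy the clone (task-2180450), then remove the
        #    snapshot from the source VM (task-2180452).
        session.wait_for_task(
            session.invoke_api(session.vim, 'Destroy_Task', clone_ref))
        session.wait_for_task(
            session.invoke_api(session.vim, 'RemoveSnapshot_Task', snapshot_ref,
                               removeChildren=False))
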
[ 2218.126516] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}}
[ 2218.126787] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-531155f5-19ce-4e41-8dc6-8d4258790ec0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.133077] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2218.133077] env[62875]: value = "task-2180450"
[ 2218.133077] env[62875]: _type = "Task"
[ 2218.133077] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2218.141423] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180450, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2218.256536] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 2218.256848] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ddc0582-39fc-4b29-bffb-f274ce741a8a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.264453] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2218.264453] env[62875]: value = "task-2180451"
[ 2218.264453] env[62875]: _type = "Task"
[ 2218.264453] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2218.273663] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180451, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2218.429122] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2218.429304] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.228s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2218.448225] env[62875]: DEBUG nova.network.neutron [-] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2218.466435] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b23306c-87a2-4508-8dba-ad9ef73364ae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.475806] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdf1b3d-0c7d-4389-99af-648cb557f6d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.491918] env[62875]: DEBUG nova.compute.manager [req-3d25b255-27f8-45e2-95c7-17716e871439 req-68acc62c-055b-4e85-82ba-605d8423b2ca service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Received event network-vif-plugged-b3066f71-cb70-4af1-bab0-ad595fb59fd9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2218.492152] env[62875]: DEBUG oslo_concurrency.lockutils [req-3d25b255-27f8-45e2-95c7-17716e871439 req-68acc62c-055b-4e85-82ba-605d8423b2ca service nova] Acquiring lock "d751c6df-1e27-4b6a-a88a-cd15456914a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2218.492363] env[62875]: DEBUG oslo_concurrency.lockutils [req-3d25b255-27f8-45e2-95c7-17716e871439 req-68acc62c-055b-4e85-82ba-605d8423b2ca service nova] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2218.492592] env[62875]: DEBUG oslo_concurrency.lockutils [req-3d25b255-27f8-45e2-95c7-17716e871439 req-68acc62c-055b-4e85-82ba-605d8423b2ca service nova] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2218.492755] env[62875]: DEBUG nova.compute.manager [req-3d25b255-27f8-45e2-95c7-17716e871439 req-68acc62c-055b-4e85-82ba-605d8423b2ca service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] No waiting events found dispatching network-vif-plugged-b3066f71-cb70-4af1-bab0-ad595fb59fd9 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2218.493104] env[62875]: WARNING nova.compute.manager [req-3d25b255-27f8-45e2-95c7-17716e871439 req-68acc62c-055b-4e85-82ba-605d8423b2ca service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Received unexpected event network-vif-plugged-b3066f71-cb70-4af1-bab0-ad595fb59fd9 for instance with vm_state building and task_state spawning.
[ 2218.505804] env[62875]: DEBUG nova.compute.manager [req-b3a74a80-82e0-4caa-a7f1-a3d52d89f62d req-42aa76e3-4ce7-420b-927a-d27c419fe44a service nova] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Detach interface failed, port_id=846fd804-bdbf-498c-a3f7-9741200ee2d4, reason: Instance 2dd748c2-048d-4450-a393-995249a9deb8 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2218.578597] env[62875]: DEBUG nova.network.neutron [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Successfully updated port: b3066f71-cb70-4af1-bab0-ad595fb59fd9 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2218.642784] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180450, 'name': Destroy_Task, 'duration_secs': 0.474029} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2218.643732] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Destroyed the VM
[ 2218.643732] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}}
[ 2218.643867] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fd603351-ef01-40b1-9a12-6804e084a6d4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.649788] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){
[ 2218.649788] env[62875]: value = "task-2180452"
[ 2218.649788] env[62875]: _type = "Task"
[ 2218.649788] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2218.658357] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180452, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2218.774884] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180451, 'name': PowerOffVM_Task, 'duration_secs': 0.239996} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2218.775158] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2218.775342] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance '2a16938e-eeaa-430d-961b-4b060187ba99' progress to 17 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 2218.836094] env[62875]: DEBUG nova.network.neutron [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Successfully updated port: de2d9317-1076-42cf-8bfa-ccba42959961 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2218.950746] env[62875]: INFO nova.compute.manager [-] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Took 1.27 seconds to deallocate network for instance.
[ 2219.081167] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-d751c6df-1e27-4b6a-a88a-cd15456914a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2219.081282] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-d751c6df-1e27-4b6a-a88a-cd15456914a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2219.081465] env[62875]: DEBUG nova.network.neutron [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2219.160263] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180452, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2219.282241] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2219.282810] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2219.282810] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2219.282916] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2219.283062] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2219.283275] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2219.283414] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2219.283570] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2219.283895] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2219.284143] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2219.289251] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19fdc79f-6c76-45f6-a1f7-5a3f72402c0b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.308466] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2219.308466] env[62875]: value = "task-2180453" [ 2219.308466] env[62875]: _type = "Task" [ 2219.308466] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.316412] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180453, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.338330] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-c4b43f9a-9c49-4281-a102-5d34f26cc9df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2219.338532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-c4b43f9a-9c49-4281-a102-5d34f26cc9df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2219.338754] env[62875]: DEBUG nova.network.neutron [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2219.456882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.457394] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.457677] env[62875]: DEBUG nova.objects.instance [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lazy-loading 'resources' on Instance uuid 2dd748c2-048d-4450-a393-995249a9deb8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2219.614486] env[62875]: DEBUG nova.network.neutron [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2219.662611] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180452, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.750029] env[62875]: DEBUG nova.network.neutron [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Updating instance_info_cache with network_info: [{"id": "b3066f71-cb70-4af1-bab0-ad595fb59fd9", "address": "fa:16:3e:a3:3c:61", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3066f71-cb", "ovs_interfaceid": "b3066f71-cb70-4af1-bab0-ad595fb59fd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.817955] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180453, 'name': ReconfigVM_Task, 'duration_secs': 0.349067} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.818273] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance '2a16938e-eeaa-430d-961b-4b060187ba99' progress to 33 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2219.866694] env[62875]: DEBUG nova.network.neutron [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2219.990924] env[62875]: DEBUG nova.network.neutron [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Updating instance_info_cache with network_info: [{"id": "de2d9317-1076-42cf-8bfa-ccba42959961", "address": "fa:16:3e:96:d7:b6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2d9317-10", "ovs_interfaceid": "de2d9317-1076-42cf-8bfa-ccba42959961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2220.000272] env[62875]: DEBUG nova.compute.manager [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Received event network-vif-plugged-de2d9317-1076-42cf-8bfa-ccba42959961 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2220.000488] env[62875]: DEBUG oslo_concurrency.lockutils [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] Acquiring lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.000694] env[62875]: DEBUG oslo_concurrency.lockutils [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.000867] env[62875]: DEBUG oslo_concurrency.lockutils [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.001051] env[62875]: DEBUG nova.compute.manager [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] No waiting events found dispatching network-vif-plugged-de2d9317-1076-42cf-8bfa-ccba42959961 {{(pid=62875) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2220.001221] env[62875]: WARNING nova.compute.manager [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Received unexpected event network-vif-plugged-de2d9317-1076-42cf-8bfa-ccba42959961 for instance with vm_state building and task_state spawning. [ 2220.001378] env[62875]: DEBUG nova.compute.manager [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Received event network-changed-de2d9317-1076-42cf-8bfa-ccba42959961 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2220.001529] env[62875]: DEBUG nova.compute.manager [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Refreshing instance network info cache due to event network-changed-de2d9317-1076-42cf-8bfa-ccba42959961. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2220.001691] env[62875]: DEBUG oslo_concurrency.lockutils [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] Acquiring lock "refresh_cache-c4b43f9a-9c49-4281-a102-5d34f26cc9df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.103148] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951b32d1-fdb4-4c09-891d-1197d0e01d7d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.110578] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37645d71-3c60-4e77-bc47-114742913849 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.140996] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bde50e4-8ddb-4f37-8749-5bd8f67cdd90 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.147864] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74235a93-9cb5-4cb4-8898-b5794cefba7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.160453] env[62875]: DEBUG nova.compute.provider_tree [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2220.169095] env[62875]: DEBUG oslo_vmware.api [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180452, 'name': RemoveSnapshot_Task, 'duration_secs': 1.020709} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.169856] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2220.252638] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-d751c6df-1e27-4b6a-a88a-cd15456914a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.252982] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Instance network_info: |[{"id": "b3066f71-cb70-4af1-bab0-ad595fb59fd9", "address": "fa:16:3e:a3:3c:61", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3066f71-cb", "ovs_interfaceid": "b3066f71-cb70-4af1-bab0-ad595fb59fd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2220.253413] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:3c:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3066f71-cb70-4af1-bab0-ad595fb59fd9', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2220.260980] env[62875]: DEBUG oslo.service.loopingcall [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2220.261184] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2220.261396] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea412aa8-e56a-4d63-bcad-04874d47064d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.281927] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2220.281927] env[62875]: value = "task-2180454" [ 2220.281927] env[62875]: _type = "Task" [ 2220.281927] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.289517] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180454, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.324795] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2220.325040] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2220.325204] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2220.325387] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2220.325535] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2220.325685] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2220.325883] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2220.326053] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2220.326223] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2220.326382] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2220.326565] env[62875]: DEBUG nova.virt.hardware [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2220.331898] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Reconfiguring VM instance instance-0000004f to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2220.332193] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff7db649-8bf8-490f-a28b-1de913e2efb5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.351128] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2220.351128] env[62875]: value = "task-2180455" [ 2220.351128] env[62875]: _type = "Task" [ 2220.351128] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.360572] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180455, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.497173] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-c4b43f9a-9c49-4281-a102-5d34f26cc9df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.497794] env[62875]: DEBUG nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Instance network_info: |[{"id": "de2d9317-1076-42cf-8bfa-ccba42959961", "address": "fa:16:3e:96:d7:b6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2d9317-10", "ovs_interfaceid": "de2d9317-1076-42cf-8bfa-ccba42959961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2220.497955] env[62875]: DEBUG oslo_concurrency.lockutils [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] Acquired lock "refresh_cache-c4b43f9a-9c49-4281-a102-5d34f26cc9df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.498209] env[62875]: DEBUG nova.network.neutron [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Refreshing network info cache for port de2d9317-1076-42cf-8bfa-ccba42959961 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2220.499572] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:d7:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de2d9317-1076-42cf-8bfa-ccba42959961', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2220.507097] env[62875]: DEBUG oslo.service.loopingcall [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 
tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2220.508129] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2220.508364] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6744ca0-4765-4be4-bbc2-f48c4f19d0b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.527686] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2220.527686] env[62875]: value = "task-2180456" [ 2220.527686] env[62875]: _type = "Task" [ 2220.527686] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.536779] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180456, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.585838] env[62875]: DEBUG nova.compute.manager [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Received event network-changed-b3066f71-cb70-4af1-bab0-ad595fb59fd9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2220.586039] env[62875]: DEBUG nova.compute.manager [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Refreshing instance network info cache due to event network-changed-b3066f71-cb70-4af1-bab0-ad595fb59fd9. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2220.586291] env[62875]: DEBUG oslo_concurrency.lockutils [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] Acquiring lock "refresh_cache-d751c6df-1e27-4b6a-a88a-cd15456914a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.586451] env[62875]: DEBUG oslo_concurrency.lockutils [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] Acquired lock "refresh_cache-d751c6df-1e27-4b6a-a88a-cd15456914a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.586640] env[62875]: DEBUG nova.network.neutron [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Refreshing network info cache for port b3066f71-cb70-4af1-bab0-ad595fb59fd9 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2220.666188] env[62875]: DEBUG nova.scheduler.client.report [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2220.674899] env[62875]: WARNING nova.compute.manager [None req-9e947e1f-60fe-41df-b72f-09171bb799b4 tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Image not found during snapshot: nova.exception.ImageNotFound: Image e2918bee-ba5b-436b-9276-4c83d567218f could not be found. [ 2220.792051] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180454, 'name': CreateVM_Task, 'duration_secs': 0.360256} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.792051] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2220.792438] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.792608] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.792975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2220.793254] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36a064cb-3809-4cab-80fc-3194eaa4d5a8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.797904] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2220.797904] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb97f2-ed64-4197-1a2e-87afcc5c66f3" [ 2220.797904] env[62875]: _type = "Task" [ 2220.797904] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.805619] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb97f2-ed64-4197-1a2e-87afcc5c66f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.859147] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180455, 'name': ReconfigVM_Task, 'duration_secs': 0.15079} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.859470] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Reconfigured VM instance instance-0000004f to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2220.860248] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c377c768-9720-4d8d-a041-9ca50b417e97 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.882824] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 2a16938e-eeaa-430d-961b-4b060187ba99/2a16938e-eeaa-430d-961b-4b060187ba99.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2220.883116] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-167bef96-6246-4c76-a95b-f07e60010ae4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.903458] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2220.903458] env[62875]: value = "task-2180457" [ 2220.903458] env[62875]: _type = "Task" [ 2220.903458] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.911637] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180457, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.040346] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180456, 'name': CreateVM_Task, 'duration_secs': 0.366293} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.042470] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2221.043106] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2221.088870] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.089181] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.089425] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.089640] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.089782] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.093247] env[62875]: INFO nova.compute.manager [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Terminating instance [ 2221.170925] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=62875) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.191707] env[62875]: INFO nova.scheduler.client.report [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Deleted allocations for instance 2dd748c2-048d-4450-a393-995249a9deb8 [ 2221.211666] env[62875]: DEBUG nova.network.neutron [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Updated VIF entry in instance network info cache for port de2d9317-1076-42cf-8bfa-ccba42959961. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2221.212078] env[62875]: DEBUG nova.network.neutron [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Updating instance_info_cache with network_info: [{"id": "de2d9317-1076-42cf-8bfa-ccba42959961", "address": "fa:16:3e:96:d7:b6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2d9317-10", "ovs_interfaceid": "de2d9317-1076-42cf-8bfa-ccba42959961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2221.309186] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb97f2-ed64-4197-1a2e-87afcc5c66f3, 'name': SearchDatastore_Task, 'duration_secs': 0.009951} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.309443] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2221.309769] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2221.310049] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2221.310206] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2221.310388] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2221.310675] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2221.310989] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2221.311228] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dceb4c27-be0b-49b8-ad2f-28b33b1caec4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.313076] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d68958a-4f18-45f0-8a0c-60f57e8803bc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.318114] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 
tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2221.318114] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52292ce8-7602-43b5-caa9-12cef5858720" [ 2221.318114] env[62875]: _type = "Task" [ 2221.318114] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.322043] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2221.322220] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2221.323205] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d5ee2fd-48c1-4e2a-a350-eab804ae4096 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.328429] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52292ce8-7602-43b5-caa9-12cef5858720, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.331114] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2221.331114] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a43312-f648-15cf-e20f-3d600d1cf596" [ 2221.331114] env[62875]: _type = "Task" [ 2221.331114] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.338347] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a43312-f648-15cf-e20f-3d600d1cf596, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.414342] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180457, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.438420] env[62875]: DEBUG nova.network.neutron [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Updated VIF entry in instance network info cache for port b3066f71-cb70-4af1-bab0-ad595fb59fd9. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2221.438772] env[62875]: DEBUG nova.network.neutron [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Updating instance_info_cache with network_info: [{"id": "b3066f71-cb70-4af1-bab0-ad595fb59fd9", "address": "fa:16:3e:a3:3c:61", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3066f71-cb", "ovs_interfaceid": "b3066f71-cb70-4af1-bab0-ad595fb59fd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2221.599687] env[62875]: DEBUG nova.compute.manager [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2221.599687] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2221.600862] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074f5546-7185-49ed-9c0c-90e9d68ca350 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.609342] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2221.609342] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca59393b-0e8e-49bb-90e7-263800964924 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.615461] env[62875]: DEBUG oslo_vmware.api [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2221.615461] env[62875]: value = "task-2180458" [ 2221.615461] env[62875]: _type = "Task" [ 2221.615461] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.625124] env[62875]: DEBUG oslo_vmware.api [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.700170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-752a8965-244e-4dd5-9760-d5f6bd77e491 tempest-ListServerFiltersTestJSON-1010402857 tempest-ListServerFiltersTestJSON-1010402857-project-member] Lock "2dd748c2-048d-4450-a393-995249a9deb8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.480s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.714794] env[62875]: DEBUG oslo_concurrency.lockutils [req-4f79c43c-bd28-4204-9f20-c459410fe675 req-0c6d6299-0641-4c42-9c81-009fe5ba6b29 service nova] Releasing lock "refresh_cache-c4b43f9a-9c49-4281-a102-5d34f26cc9df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2221.828884] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52292ce8-7602-43b5-caa9-12cef5858720, 'name': SearchDatastore_Task, 'duration_secs': 0.010076} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.829216] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2221.829450] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2221.829685] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2221.840393] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a43312-f648-15cf-e20f-3d600d1cf596, 'name': SearchDatastore_Task, 'duration_secs': 0.008605} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.841145] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc13c546-de93-4250-81a8-be6cced58fd5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.847195] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2221.847195] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cd373-6acd-924f-201d-02c3743e4912" [ 2221.847195] env[62875]: _type = "Task" [ 2221.847195] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.854360] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cd373-6acd-924f-201d-02c3743e4912, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.915930] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180457, 'name': ReconfigVM_Task, 'duration_secs': 0.816659} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.916241] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 2a16938e-eeaa-430d-961b-4b060187ba99/2a16938e-eeaa-430d-961b-4b060187ba99.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2221.916508] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance '2a16938e-eeaa-430d-961b-4b060187ba99' progress to 50 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2221.942053] env[62875]: DEBUG oslo_concurrency.lockutils [req-384eba77-89d0-4b2f-b9ff-10974e2660d9 req-6a3a62e5-78dd-4d71-8137-d6ee8d884f0d service nova] Releasing lock "refresh_cache-d751c6df-1e27-4b6a-a88a-cd15456914a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2222.126937] env[62875]: DEBUG oslo_vmware.api [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180458, 'name': PowerOffVM_Task, 'duration_secs': 0.164016} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.126937] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2222.127177] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2222.127301] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b147c38b-b821-419e-954f-3d0792ce8e8c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.358759] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cd373-6acd-924f-201d-02c3743e4912, 'name': SearchDatastore_Task, 'duration_secs': 0.011885} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.359089] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2222.359529] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d751c6df-1e27-4b6a-a88a-cd15456914a1/d751c6df-1e27-4b6a-a88a-cd15456914a1.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2222.359901] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2222.360138] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2222.360332] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46224a4f-ac60-4e37-84a6-9a4352ff9788 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.362736] env[62875]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f05ac36-f1fb-45a2-b534-bf7ad98fea03 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.370266] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2222.370266] env[62875]: value = "task-2180460" [ 2222.370266] env[62875]: _type = "Task" [ 2222.370266] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.371683] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2222.371683] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2222.374925] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1f486b2-2d27-4600-bbde-0047c1c0e810 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.380428] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2222.380428] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c1aa1b-36c1-3ffe-0677-67e8c5b6d70a" [ 2222.380428] env[62875]: _type = "Task" [ 2222.380428] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.383783] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.391248] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c1aa1b-36c1-3ffe-0677-67e8c5b6d70a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.423049] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df878a7a-a1e8-437d-95b0-d4d3b5994ede {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.443220] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cc6e5d-ca2e-4268-b00d-7d687968ba20 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.461670] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance '2a16938e-eeaa-430d-961b-4b060187ba99' progress to 67 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2222.498558] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.546347] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2222.546601] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2222.546823] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleting the datastore file [datastore1] 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2222.547141] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86997f71-0778-4849-ae85-93c65c979563 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.554439] env[62875]: DEBUG oslo_vmware.api [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for the task: (returnval){ [ 2222.554439] env[62875]: value = "task-2180461" [ 2222.554439] env[62875]: _type = "Task" [ 2222.554439] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.562519] env[62875]: DEBUG oslo_vmware.api [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180461, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.880685] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483929} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.880943] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d751c6df-1e27-4b6a-a88a-cd15456914a1/d751c6df-1e27-4b6a-a88a-cd15456914a1.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2222.881175] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2222.881423] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29b1d208-ce54-4d37-9d14-96af9babbe16 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.888480] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2222.888480] env[62875]: value = "task-2180462" [ 2222.888480] env[62875]: _type = "Task" [ 2222.888480] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.894904] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c1aa1b-36c1-3ffe-0677-67e8c5b6d70a, 'name': SearchDatastore_Task, 'duration_secs': 0.011802} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.896236] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acbd47cb-b2ad-4139-a373-2a560604b833 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.903393] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180462, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.906406] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2222.906406] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52491cdd-0499-3b80-8af9-402a2e8d1459" [ 2222.906406] env[62875]: _type = "Task" [ 2222.906406] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.914693] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52491cdd-0499-3b80-8af9-402a2e8d1459, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.004380] env[62875]: DEBUG nova.network.neutron [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Port acf0849a-ab7f-4949-951d-8268baee804c binding to destination host cpu-1 is already ACTIVE {{(pid=62875) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2223.005712] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.005862] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2223.006027] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 2223.065292] env[62875]: DEBUG oslo_vmware.api [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Task: {'id': task-2180461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310431} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.065539] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2223.065722] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2223.065895] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2223.066075] env[62875]: INFO nova.compute.manager [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Took 1.47 seconds to destroy the instance on the hypervisor. [ 2223.066460] env[62875]: DEBUG oslo.service.loopingcall [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2223.066535] env[62875]: DEBUG nova.compute.manager [-] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2223.066596] env[62875]: DEBUG nova.network.neutron [-] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2223.310892] env[62875]: DEBUG nova.compute.manager [req-d20ff92c-2b8b-430c-a0dd-41796f231fb1 req-691bb914-bf12-4563-983d-95d4730d1a6e service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Received event network-vif-deleted-c2a66c75-6cc6-4364-b6f7-c6ad771d208b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2223.310892] env[62875]: INFO nova.compute.manager [req-d20ff92c-2b8b-430c-a0dd-41796f231fb1 req-691bb914-bf12-4563-983d-95d4730d1a6e service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Neutron deleted interface c2a66c75-6cc6-4364-b6f7-c6ad771d208b; detaching it from the instance and deleting it from the info cache [ 2223.310892] env[62875]: DEBUG nova.network.neutron [req-d20ff92c-2b8b-430c-a0dd-41796f231fb1 req-691bb914-bf12-4563-983d-95d4730d1a6e service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2223.398264] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 
0.06806} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.398561] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2223.399324] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e9e594-bb6d-47c8-aa17-2d83fa8c5cb6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.425235] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] d751c6df-1e27-4b6a-a88a-cd15456914a1/d751c6df-1e27-4b6a-a88a-cd15456914a1.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2223.428409] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c0bb20f-af6c-4cc6-9314-cf3adffb2574 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.448925] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52491cdd-0499-3b80-8af9-402a2e8d1459, 'name': SearchDatastore_Task, 'duration_secs': 0.008915} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.450132] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2223.450393] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c4b43f9a-9c49-4281-a102-5d34f26cc9df/c4b43f9a-9c49-4281-a102-5d34f26cc9df.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2223.450695] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2223.450695] env[62875]: value = "task-2180463" [ 2223.450695] env[62875]: _type = "Task" [ 2223.450695] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.451164] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e805e68-0a38-412b-b3b0-48efa7a49713 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.461914] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180463, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.463149] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2223.463149] env[62875]: value = "task-2180464" [ 2223.463149] env[62875]: _type = "Task" [ 2223.463149] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.471360] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180464, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.516238] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Skipping network cache update for instance because it is being deleted. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 2223.516397] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 2223.516519] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Skipping network cache update for instance because it is Building. 
{{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 2223.553669] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.553867] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2223.554092] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2223.554311] env[62875]: DEBUG nova.objects.instance [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lazy-loading 'info_cache' on Instance uuid 7969485a-ccd6-48e0-bdea-b8920af28843 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2223.795822] env[62875]: DEBUG nova.network.neutron [-] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2223.816025] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f88fd7f-8e6d-4194-9264-16c59b6c9f57 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.826847] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066ff366-019d-46f2-be5e-f24ca5d66f7f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.871996] env[62875]: DEBUG nova.compute.manager [req-d20ff92c-2b8b-430c-a0dd-41796f231fb1 req-691bb914-bf12-4563-983d-95d4730d1a6e service nova] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Detach interface failed, port_id=c2a66c75-6cc6-4364-b6f7-c6ad771d208b, reason: Instance 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2223.964480] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180463, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.980019] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180464, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493467} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.980019] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] c4b43f9a-9c49-4281-a102-5d34f26cc9df/c4b43f9a-9c49-4281-a102-5d34f26cc9df.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2223.980019] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2223.980019] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89662049-18ed-4379-9795-1e40e4fdbc50 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.985906] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2223.985906] env[62875]: value = "task-2180465" [ 2223.985906] env[62875]: _type = "Task" [ 2223.985906] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.996176] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180465, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.043397] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.043882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.044176] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.298054] env[62875]: INFO nova.compute.manager [-] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Took 1.23 seconds to deallocate network for instance. [ 2224.464111] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180463, 'name': ReconfigVM_Task, 'duration_secs': 0.560683} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.464416] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Reconfigured VM instance instance-00000052 to attach disk [datastore2] d751c6df-1e27-4b6a-a88a-cd15456914a1/d751c6df-1e27-4b6a-a88a-cd15456914a1.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2224.465065] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a0cb7db-262e-48a8-99df-d289a91ee22c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.472093] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2224.472093] env[62875]: value = "task-2180466" [ 2224.472093] env[62875]: _type = "Task" [ 2224.472093] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.480494] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180466, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.495102] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079922} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.495441] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2224.496257] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44364e9-23be-4a6c-b97c-24299f4c1f62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.520717] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] c4b43f9a-9c49-4281-a102-5d34f26cc9df/c4b43f9a-9c49-4281-a102-5d34f26cc9df.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2224.521079] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57cb15a2-bb34-417c-91d8-d69a82e579ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.543605] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2224.543605] env[62875]: value = "task-2180467" [ 2224.543605] env[62875]: _type = "Task" [ 2224.543605] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.556628] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180467, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.810768] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.811071] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.811299] env[62875]: DEBUG nova.objects.instance [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lazy-loading 'resources' on Instance uuid 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2224.982769] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180466, 'name': Rename_Task, 'duration_secs': 0.140452} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.983394] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2224.983771] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebdb4d7a-8f6b-4943-8da1-946eda571139 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.990232] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2224.990232] env[62875]: value = "task-2180468" [ 2224.990232] env[62875]: _type = "Task" [ 2224.990232] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.997924] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180468, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.056668] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180467, 'name': ReconfigVM_Task, 'duration_secs': 0.299147} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.057484] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Reconfigured VM instance instance-00000051 to attach disk [datastore2] c4b43f9a-9c49-4281-a102-5d34f26cc9df/c4b43f9a-9c49-4281-a102-5d34f26cc9df.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2225.057777] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-096af905-65bb-4d8c-987a-11043b748046 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.064747] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2225.064747] env[62875]: value = "task-2180469" [ 2225.064747] env[62875]: _type = "Task" [ 2225.064747] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.073542] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180469, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.093071] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2225.093356] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2225.093928] env[62875]: DEBUG nova.network.neutron [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2225.316821] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updating instance_info_cache with network_info: [{"id": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "address": "fa:16:3e:e9:29:5c", "network": {"id": "3028cdbc-4b41-4102-bd32-59fd93a60bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-125987334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", 
"type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9e631a043ef4ed9ae37c18a142afa38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87f73d-31", "ovs_interfaceid": "6c87f73d-311a-4ed3-9d9f-5325a201e67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.443783] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19fab18-f8f1-4f0a-b8e7-0f7bc5b75ef8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.452304] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3bcf10-3ad9-4165-b241-241362d4c5f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.488755] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c3a481-d99b-44cd-8ade-054284c66c9b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.497026] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cf9d74-bc62-4d2a-8e44-25a168366030 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.505032] env[62875]: DEBUG oslo_vmware.api [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180468, 'name': PowerOnVM_Task, 'duration_secs': 0.464022} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.512769] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2225.513156] env[62875]: INFO nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Took 11.21 seconds to spawn the instance on the hypervisor. 
[ 2225.515026] env[62875]: DEBUG nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2225.515026] env[62875]: DEBUG nova.compute.provider_tree [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2225.515659] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ff0bac-7581-4132-9f0b-4642f77e9425 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.575076] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180469, 'name': Rename_Task, 'duration_secs': 0.133183} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.575353] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2225.575595] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24f7961f-8284-43a4-b7ca-3322a0be8658 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.582274] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2225.582274] env[62875]: value = "task-2180470" [ 2225.582274] env[62875]: _type = "Task" [ 2225.582274] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.589562] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180470, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.805959] env[62875]: DEBUG nova.network.neutron [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance_info_cache with network_info: [{"id": "acf0849a-ab7f-4949-951d-8268baee804c", "address": "fa:16:3e:29:a5:31", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf0849a-ab", "ovs_interfaceid": "acf0849a-ab7f-4949-951d-8268baee804c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.823559] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-7969485a-ccd6-48e0-bdea-b8920af28843" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2225.823768] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2225.824296] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2225.864276] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "f629aa16-0442-4659-9a9c-30f10136ae84" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2225.864519] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "f629aa16-0442-4659-9a9c-30f10136ae84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.019241] env[62875]: DEBUG 
nova.scheduler.client.report [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2226.030881] env[62875]: INFO nova.compute.manager [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Took 15.98 seconds to build instance. [ 2226.092991] env[62875]: DEBUG oslo_vmware.api [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180470, 'name': PowerOnVM_Task, 'duration_secs': 0.449422} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.092991] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2226.092991] env[62875]: INFO nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Took 14.12 seconds to spawn the instance on the hypervisor. 
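
Editor's note: the report-client record above carries the full provider inventory for 2d6e5fad-ed55-4f17-b68d-be9dae183a02. The capacity placement actually schedules against is (total - reserved) * allocation_ratio per resource class; a quick sketch over the logged values follows (`effective_capacity` is our name, not a Nova or placement API).

```python
# Derive effective schedulable capacity from the placement inventory
# record logged above: (total - reserved) * allocation_ratio.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inventory: dict) -> dict:
    """Map each resource class to its overcommit-adjusted capacity."""
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inventory.items()}

print(effective_capacity(INVENTORY))
# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```

With a 4.0 CPU allocation ratio, the 48 physical vCPUs reported in the hypervisor resource view can back up to 192 scheduled vCPUs, which is why the tempest runs above can pack this many instances onto one node.
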
[ 2226.093174] env[62875]: DEBUG nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2226.093990] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a8929d-35bc-40ca-b2ed-ef3d1d06076a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2226.308613] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2226.327392] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2226.366920] env[62875]: DEBUG nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}}
[ 2226.525825] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2226.527292] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.200s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2226.527519] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2226.527705] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2226.528675] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b426e78b-5827-45e4-ac57-b459b2612c3e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2226.533120] env[62875]: DEBUG oslo_concurrency.lockutils [None req-7f799353-b36a-4a13-8bdb-2eb3ae3f2f03 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.487s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2226.537337] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20c9b58-d74b-4a43-b983-b30b84c3f7c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2226.553410] env[62875]: INFO nova.scheduler.client.report [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Deleted allocations for instance 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513
[ 2226.555213] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8760a8a-3a6a-434c-bcd0-ed7c5df1255f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2226.565622] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfc6a23-499d-446e-9558-4297680155dc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2226.600478] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179909MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2226.600478] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2226.600478] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2226.610582] env[62875]: INFO nova.compute.manager [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Took 20.16 seconds to build instance.
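The inventory dict in the "Inventory has not changed for provider" records is what Placement uses to size this provider: schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check against the logged data, values copied from the record above:

# Capacity check for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 using the
# standard Placement formula; prints VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 174},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity, 'single-allocation cap:', inv['max_unit'])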
[ 2226.832937] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b90ffc-83a2-48e1-8a79-d9d9ca033689 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2226.852707] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4138be-1f86-42d7-b99e-5a27bbe5f707 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2226.859894] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance '2a16938e-eeaa-430d-961b-4b060187ba99' progress to 83 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 2226.889588] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2227.064738] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9d088fd6-1a01-4acb-9960-47b6a8e619eb tempest-ImagesTestJSON-2014349889 tempest-ImagesTestJSON-2014349889-project-member] Lock "7b3b22c7-26e2-46bf-82b4-8a2b1e68d513" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.975s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2227.104979] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40aef7b6-98c1-4dd4-abf7-4065cc6c3b4f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2227.113110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d5ba95a5-581e-4da2-8b06-fc893f2d18b3 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.667s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2227.117137] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a39c0d-4f41-4a00-af8e-b34635596c80 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Suspending the VM {{(pid=62875) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}}
[ 2227.117373] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-dca6992a-f2c5-434d-a249-f1c7c569b875 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2227.123731] env[62875]: DEBUG oslo_vmware.api [None req-e8a39c0d-4f41-4a00-af8e-b34635596c80 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){
[ 2227.123731] env[62875]: value = "task-2180471"
[ 2227.123731] env[62875]: _type = "Task"
[ 2227.123731] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2227.131979] env[62875]: DEBUG oslo_vmware.api [None req-e8a39c0d-4f41-4a00-af8e-b34635596c80 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180471, 'name': SuspendVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2227.296663] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "d751c6df-1e27-4b6a-a88a-cd15456914a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2227.296966] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2227.297198] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "d751c6df-1e27-4b6a-a88a-cd15456914a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2227.297383] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2227.297554] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2227.300034] env[62875]: INFO nova.compute.manager [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Terminating instance
[ 2227.366270] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2227.366546] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce9ea116-7b89-41a9-a7c3-00babcb39dc8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2227.374309] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2227.374309] env[62875]: value = "task-2180472"
[ 2227.374309] env[62875]: _type = "Task"
[ 2227.374309] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2227.383573] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180472, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2227.613868] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Applying migration context for instance 2a16938e-eeaa-430d-961b-4b060187ba99 as it has an incoming, in-progress migration 3964c284-0811-449d-8064-51072de6a67a. Migration status is post-migrating {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}}
[ 2227.615144] env[62875]: INFO nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating resource usage from migration 3964c284-0811-449d-8064-51072de6a67a
[ 2227.634047] env[62875]: DEBUG oslo_vmware.api [None req-e8a39c0d-4f41-4a00-af8e-b34635596c80 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180471, 'name': SuspendVM_Task} progress is 70%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2227.634982] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.635122] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9e0aaea6-96cf-494d-9f70-a709a47f9772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.635253] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.635371] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 70547fbd-7ce8-466e-8abc-b490b8dd6b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.635500] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 45403db3-ff20-42d3-8a37-8db671d8c1fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.635615] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance c4b43f9a-9c49-4281-a102-5d34f26cc9df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.635836] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d751c6df-1e27-4b6a-a88a-cd15456914a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.635836] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Migration 3964c284-0811-449d-8064-51072de6a67a is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}}
[ 2227.636077] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2a16938e-eeaa-430d-961b-4b060187ba99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2227.804133] env[62875]: DEBUG nova.compute.manager [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2227.804411] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2227.805312] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dc067a-c146-4bd0-aea1-34a8e2c6b9cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2227.813584] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 2227.813895] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee8ae755-88b2-4a64-8096-5d419b0ad96a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2227.819659] env[62875]: DEBUG oslo_vmware.api [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2227.819659] env[62875]: value = "task-2180473"
[ 2227.819659] env[62875]: _type = "Task"
[ 2227.819659] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2227.833397] env[62875]: DEBUG oslo_vmware.api [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2227.886044] env[62875]: DEBUG oslo_vmware.api [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180472, 'name': PowerOnVM_Task, 'duration_secs': 0.395943} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2227.886440] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2227.886660] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79168071-682c-4d03-b142-a803563bbc87 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance '2a16938e-eeaa-430d-961b-4b060187ba99' progress to 100 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 2228.134287] env[62875]: DEBUG oslo_vmware.api [None req-e8a39c0d-4f41-4a00-af8e-b34635596c80 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180471, 'name': SuspendVM_Task, 'duration_secs': 0.558691} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2228.134557] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a39c0d-4f41-4a00-af8e-b34635596c80 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Suspended the VM {{(pid=62875) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}}
[ 2228.134736] env[62875]: DEBUG nova.compute.manager [None req-e8a39c0d-4f41-4a00-af8e-b34635596c80 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2228.135501] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3f7505-2034-4092-a842-a838f2ed21fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2228.139760] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance f629aa16-0442-4659-9a9c-30f10136ae84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2228.139994] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2228.140166] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2228.260825] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c622992f-4c67-4fe7-9441-ef45a973e236 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2228.268010] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bff0540-6090-4bdd-9d12-875e647b8315 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2228.298265] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf24b6f-5bb8-4ddb-b697-17ea48b74fc0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2228.305704] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bdd595-fa45-4c47-8799-e8ffc604d6d4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2228.318709] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2228.327855] env[62875]: DEBUG oslo_vmware.api [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180473, 'name': PowerOffVM_Task, 'duration_secs': 0.200441} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2228.328219] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2228.328555] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2228.329328] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-005338b3-b0cf-4d82-90f9-ce669722428f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2228.790088] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2228.790396] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2228.790522] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleting the datastore file [datastore2] d751c6df-1e27-4b6a-a88a-cd15456914a1 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2228.790794] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0239cee9-b594-4d77-8545-0a097f2f8f99 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2228.796888] env[62875]: DEBUG oslo_vmware.api [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2228.796888] env[62875]: value = "task-2180475"
[ 2228.796888] env[62875]: _type = "Task"
[ 2228.796888] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2228.805239] env[62875]: DEBUG oslo_vmware.api [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180475, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2228.821213] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2229.311217] env[62875]: DEBUG oslo_vmware.api [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180475, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415816} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2229.311521] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2229.311707] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2229.311920] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2229.312115] env[62875]: INFO nova.compute.manager [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Took 1.51 seconds to destroy the instance on the hypervisor.
[ 2229.312366] env[62875]: DEBUG oslo.service.loopingcall [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2229.312561] env[62875]: DEBUG nova.compute.manager [-] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2229.312660] env[62875]: DEBUG nova.network.neutron [-] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2229.326367] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2229.327738] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.726s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2229.327738] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.437s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2229.328747] env[62875]: INFO nova.compute.claims [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2229.584064] env[62875]: DEBUG nova.compute.manager [req-3f1b83e0-8ff8-41d8-a2d5-9fa49686a6c4 req-afee9430-a8ae-42cd-9459-ba9a428e6946 service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Received event network-vif-deleted-b3066f71-cb70-4af1-bab0-ad595fb59fd9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2229.584283] env[62875]: INFO nova.compute.manager [req-3f1b83e0-8ff8-41d8-a2d5-9fa49686a6c4 req-afee9430-a8ae-42cd-9459-ba9a428e6946 service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Neutron deleted interface b3066f71-cb70-4af1-bab0-ad595fb59fd9; detaching it from the instance and deleting it from the info cache
[ 2229.584461] env[62875]: DEBUG nova.network.neutron [req-3f1b83e0-8ff8-41d8-a2d5-9fa49686a6c4 req-afee9430-a8ae-42cd-9459-ba9a428e6946 service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2229.620173] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2229.620745] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2229.620745] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2229.620870] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2229.620958] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2229.624861] env[62875]: INFO nova.compute.manager [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Terminating instance
[ 2229.884339] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "2a16938e-eeaa-430d-961b-4b060187ba99" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2229.884621] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2229.884797] env[62875]: DEBUG nova.compute.manager [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Going to confirm migration 2 {{(pid=62875) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}}
[ 2230.067368] env[62875]: DEBUG nova.network.neutron [-] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2230.087821] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2d50ccb-8251-451e-87b5-8294add3bc5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.097878] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6705297-5408-4b3f-b1f6-2186c860387e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.125266] env[62875]: DEBUG nova.compute.manager [req-3f1b83e0-8ff8-41d8-a2d5-9fa49686a6c4 req-afee9430-a8ae-42cd-9459-ba9a428e6946 service nova] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Detach interface failed, port_id=b3066f71-cb70-4af1-bab0-ad595fb59fd9, reason: Instance d751c6df-1e27-4b6a-a88a-cd15456914a1 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2230.128131] env[62875]: DEBUG nova.compute.manager [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2230.128326] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2230.129101] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121ebabf-8a7d-463b-803a-4d2b30168c28 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.135791] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2230.136031] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6335541b-749b-4f32-a4d2-89032af6db24 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.471158] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2230.471348] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2230.471526] env[62875]: DEBUG nova.network.neutron [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2230.471706] env[62875]: DEBUG nova.objects.instance [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lazy-loading 'info_cache' on Instance uuid 2a16938e-eeaa-430d-961b-4b060187ba99 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2230.478502] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f193c84-b67d-475a-827a-7e40524eb643 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.486875] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d40de5-259d-4376-8d7d-c64916f7147c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.517307] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e40af2-411a-4cd1-9ffc-65d070129ae3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.525459] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bd3696-0104-4d73-a88b-c21a5eca684d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2230.540698] env[62875]: DEBUG nova.compute.provider_tree [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2230.569919] env[62875]: INFO nova.compute.manager [-] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Took 1.26 seconds to deallocate network for instance.
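The PowerOnVM_Task, SuspendVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task records above all follow the same invoke-then-poll shape: a *_Task vSphere method returns a task reference immediately, and the client polls it (the "progress is N%." lines) until it succeeds or errors. A sketch using the public oslo.vmware API; "session" (an established VMwareAPISession) and "vm_ref" are placeholders, not values from this log:

from oslo_vmware import exceptions as vexc

# Returns immediately with a task reference such as task-2180470.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
try:
    # wait_for_task polls the task state and returns its info once the
    # task reaches the "success" state (the "completed successfully" records).
    task_info = session.wait_for_task(task)
except vexc.VimException:
    raise  # the task ended in the "error" state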
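A consistency check on the final resource view above: the tracker holds nine 1-VCPU, 1-GB-disk allocations (seven active instances at 192 MB each, the in-progress migration 3964c284 at 192 MB, and the resized instance 2a16938e at 256 MB), so used_vcpus = 9 and used_disk = 9 GB; for RAM, 8 x 192 MB + 256 MB plus the 512 MB host reservation gives 2304 MB, matching used_ram=2304MB.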
[ 2231.044251] env[62875]: DEBUG nova.scheduler.client.report [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2231.076341] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2231.549249] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.222s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2231.550129] env[62875]: DEBUG nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2231.552414] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.476s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2231.552630] env[62875]: DEBUG nova.objects.instance [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lazy-loading 'resources' on Instance uuid d751c6df-1e27-4b6a-a88a-cd15456914a1 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2231.696754] env[62875]: DEBUG nova.network.neutron [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance_info_cache with network_info: [{"id": "acf0849a-ab7f-4949-951d-8268baee804c", "address": "fa:16:3e:29:a5:31", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf0849a-ab", "ovs_interfaceid": "acf0849a-ab7f-4949-951d-8268baee804c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2232.055586] env[62875]: DEBUG nova.compute.utils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2232.059984] env[62875]: DEBUG nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2232.060148] env[62875]: DEBUG nova.network.neutron [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2232.100039] env[62875]: DEBUG nova.policy [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df8691f7b70e457da71b40f76fd7f752', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bd9a8695f474c76afac0461f442839d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 2232.183695] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702fe166-4dcf-4da9-9574-7bce00e6dc6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2232.191564] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45063d1-edf7-4484-9f22-7d578e63add1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2232.223545] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-2a16938e-eeaa-430d-961b-4b060187ba99" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2232.223857] env[62875]: DEBUG nova.objects.instance [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lazy-loading 'migration_context' on Instance uuid 2a16938e-eeaa-430d-961b-4b060187ba99 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2232.225934] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e529393b-06e1-4661-82e7-38d1a3757102 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2232.233222] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d96d7be-328b-4376-9cc4-485718436448 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2232.246902] env[62875]: DEBUG nova.compute.provider_tree [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2232.385853] env[62875]: DEBUG nova.network.neutron [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Successfully created port: c6e61eaf-2f31-4315-887e-aeeb76726046 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2232.560117] env[62875]: DEBUG nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2232.727181] env[62875]: DEBUG nova.objects.base [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Object Instance<2a16938e-eeaa-430d-961b-4b060187ba99> lazy-loaded attributes: info_cache,migration_context {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}}
[ 2232.728122] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f5d993-9df5-4503-8b99-a178d64455ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2232.747523] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddb17bde-cdaa-4ace-93eb-a03ba825f271 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2232.750360] env[62875]: DEBUG nova.scheduler.client.report [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2232.756858] env[62875]: DEBUG oslo_vmware.api [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2232.756858] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52376a5c-25c4-d95e-3c38-94d17f1cc474"
[ 2232.756858] env[62875]: _type = "Task"
[ 2232.756858] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2232.765569] env[62875]: DEBUG oslo_vmware.api [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52376a5c-25c4-d95e-3c38-94d17f1cc474, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2233.255770] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.703s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2233.267758] env[62875]: DEBUG oslo_vmware.api [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52376a5c-25c4-d95e-3c38-94d17f1cc474, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2233.268102] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2233.268373] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2233.277498] env[62875]: INFO nova.scheduler.client.report [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted allocations for instance d751c6df-1e27-4b6a-a88a-cd15456914a1
[ 2233.569527] env[62875]: DEBUG nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2233.589562] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2233.589967] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2233.590260] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2233.590585] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2233.590857] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2233.591143] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2233.591501] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2233.591800] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2233.592113] env[62875]: DEBUG
nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2233.592393] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2233.592681] env[62875]: DEBUG nova.virt.hardware [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2233.594033] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9960fa29-e928-4d7f-bc5a-fbfd7080ed6e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.605035] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28da66b-fb98-4bb3-bc97-61356b046ace {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.784932] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a08e913b-be71-49f1-ab60-2222e0a37e87 tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "d751c6df-1e27-4b6a-a88a-cd15456914a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.488s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2233.890759] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5204c1-a93c-4c25-9e85-068a6b6d3ca2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.899630] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef0516f-243a-4382-a6fd-d0a0cdb5c07b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.930635] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5749dcde-0030-4370-86ab-9598f779d1af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.939260] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e67bf66-97bc-4590-9dac-649fc42ec657 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.953030] env[62875]: DEBUG nova.compute.provider_tree [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2234.456355] env[62875]: DEBUG nova.scheduler.client.report [None
req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2234.469734] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.469970] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.972607] env[62875]: DEBUG nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Starting instance...
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2235.469717] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.201s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.498977] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.499319] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.500884] env[62875]: INFO nova.compute.claims [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2236.025811] env[62875]: INFO nova.scheduler.client.report [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted allocation for migration 3964c284-0811-449d-8064-51072de6a67a [ 2236.076659] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "9b3628be-b8a3-4105-bc84-088dede23aaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.076890] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.265959] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.266200] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689
tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.531290] env[62875]: DEBUG oslo_concurrency.lockutils [None req-eedd2255-b185-4aa5-97f1-72f16e42b81c tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 6.646s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.578803] env[62875]: DEBUG nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2236.653794] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4e1547-ab66-4031-8584-c84a184c18b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.661865] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc62a29d-19e9-40ff-8297-a3a6fb67a341 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.693677] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776ca6d2-5dc6-4f61-8a27-06ed88b7c790 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.700921] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1737ffe3-8f8b-4db3-a1ad-4cb4948101ee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.714101] env[62875]: DEBUG nova.compute.provider_tree [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2236.769575] env[62875]: DEBUG nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Starting instance...
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2237.026091] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "2a16938e-eeaa-430d-961b-4b060187ba99" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.026330] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.026549] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.026734] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.026904] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.029074] env[62875]: INFO nova.compute.manager [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Terminating instance [ 2237.097766] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.170394] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2237.170616] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None
req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2237.170799] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleting the datastore file [datastore2] c4b43f9a-9c49-4281-a102-5d34f26cc9df {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2237.171074] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7a1dde6-c0c8-4a4f-aab3-e46cf04b7a85 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.178095] env[62875]: DEBUG oslo_vmware.api [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2237.178095] env[62875]: value = "task-2180477" [ 2237.178095] env[62875]: _type = "Task" [ 2237.178095] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.185825] env[62875]: DEBUG oslo_vmware.api [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.216919] env[62875]: DEBUG nova.scheduler.client.report [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2237.286110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.446798] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "3458fc2b-a241-4492-9b65-f89b955b8c0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.447067] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2
tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.532447] env[62875]: DEBUG nova.compute.manager [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2237.532791] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2237.533941] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9db8630-502e-4a3b-8ddd-706d4a217def {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.542790] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2237.543048] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d72a3850-3aed-49eb-b795-6c97f9b60260 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.548898] env[62875]: DEBUG oslo_vmware.api [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2237.548898] env[62875]: value = "task-2180478" [ 2237.548898] env[62875]: _type = "Task" [ 2237.548898] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.556283] env[62875]: DEBUG oslo_vmware.api [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180478, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.687753] env[62875]: DEBUG oslo_vmware.api [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143889} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.688034] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2237.688212] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2237.688390] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2237.688568] env[62875]: INFO nova.compute.manager [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Took 7.56 seconds to destroy the instance on the hypervisor. [ 2237.688819] env[62875]: DEBUG oslo.service.loopingcall [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2237.689015] env[62875]: DEBUG nova.compute.manager [-] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2237.689125] env[62875]: DEBUG nova.network.neutron [-] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2237.721740] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.222s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.722446] env[62875]: DEBUG nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Start building networks asynchronously for instance.
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2237.725297] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.628s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.726885] env[62875]: INFO nova.compute.claims [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2237.949267] env[62875]: DEBUG nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2237.980820] env[62875]: DEBUG nova.compute.manager [req-82c90a5c-d3dd-4523-bea0-aa4ddfa5dd08 req-8d3981c8-7075-4b22-8d85-e9fa4e51908e service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Received event network-vif-deleted-de2d9317-1076-42cf-8bfa-ccba42959961 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2237.981041] env[62875]: INFO nova.compute.manager [req-82c90a5c-d3dd-4523-bea0-aa4ddfa5dd08 req-8d3981c8-7075-4b22-8d85-e9fa4e51908e service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Neutron deleted interface de2d9317-1076-42cf-8bfa-ccba42959961; detaching it from the instance and deleting it from the info cache [ 2237.981228] env[62875]: DEBUG nova.network.neutron [req-82c90a5c-d3dd-4523-bea0-aa4ddfa5dd08 req-8d3981c8-7075-4b22-8d85-e9fa4e51908e service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2238.059351] env[62875]: DEBUG oslo_vmware.api [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180478, 'name': PowerOffVM_Task, 'duration_secs': 0.192437} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2238.059623] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2238.059791] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2238.060082] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06d35ddc-0b9d-4645-ae05-0ce01eb9e49b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.215040] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2238.215040] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2238.215040] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleting the datastore file [datastore1] 2a16938e-eeaa-430d-961b-4b060187ba99 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2238.215308] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e88900a-56cc-4f3a-9d0a-a130e94d79e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.221635] env[62875]: DEBUG oslo_vmware.api [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2238.221635] env[62875]: value = "task-2180480" [ 2238.221635] env[62875]: _type = "Task" [ 2238.221635] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.229482] env[62875]: DEBUG oslo_vmware.api [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180480, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.231811] env[62875]: DEBUG nova.compute.utils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2238.235235] env[62875]: DEBUG nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2238.235422] env[62875]: DEBUG nova.network.neutron [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2238.273246] env[62875]: DEBUG nova.compute.manager [req-cd7cd037-553b-4a02-bf61-012f6c651b33 req-7901dde1-cc0a-402a-bf98-ff82e9621ba2 service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Received event network-vif-plugged-c6e61eaf-2f31-4315-887e-aeeb76726046 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2238.273246] env[62875]: DEBUG oslo_concurrency.lockutils [req-cd7cd037-553b-4a02-bf61-012f6c651b33 req-7901dde1-cc0a-402a-bf98-ff82e9621ba2 service nova] Acquiring lock "f629aa16-0442-4659-9a9c-30f10136ae84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2238.273246] env[62875]: DEBUG oslo_concurrency.lockutils [req-cd7cd037-553b-4a02-bf61-012f6c651b33 req-7901dde1-cc0a-402a-bf98-ff82e9621ba2 service nova] Lock "f629aa16-0442-4659-9a9c-30f10136ae84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2238.273246] env[62875]: DEBUG oslo_concurrency.lockutils [req-cd7cd037-553b-4a02-bf61-012f6c651b33 req-7901dde1-cc0a-402a-bf98-ff82e9621ba2 service nova] Lock "f629aa16-0442-4659-9a9c-30f10136ae84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2238.273246] env[62875]: DEBUG nova.compute.manager [req-cd7cd037-553b-4a02-bf61-012f6c651b33 req-7901dde1-cc0a-402a-bf98-ff82e9621ba2 service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] No waiting events found dispatching network-vif-plugged-c6e61eaf-2f31-4315-887e-aeeb76726046 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2238.273246] env[62875]: WARNING nova.compute.manager [req-cd7cd037-553b-4a02-bf61-012f6c651b33 req-7901dde1-cc0a-402a-bf98-ff82e9621ba2 service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Received unexpected event network-vif-plugged-c6e61eaf-2f31-4315-887e-aeeb76726046 for instance with vm_state building and task_state spawning.
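
The burst of "-events" lock traffic above is Nova's external-event plumbing: when Neutron reports network-vif-plugged, the compute manager pops any waiter registered for that (instance, event) pair under a per-instance "<uuid>-events" lock; here no waiter was registered yet, so the event is logged as unexpected and dropped. A minimal sketch of that pop-or-warn pattern, using plain threading primitives rather than Nova's real InstanceEvents class (all names below are illustrative, not Nova's actual API):

    import threading
    from collections import defaultdict

    class ToyInstanceEvents:
        """Toy registry of (instance, event) waiters; Nova's real class
        guards this dict with an oslo.concurrency lock named "<uuid>-events"."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)  # uuid -> {event_name: threading.Event}

        def prepare_for_instance_event(self, uuid, event_name):
            # Called by the spawning thread *before* triggering the port plug.
            with self._lock:
                return self._waiters[uuid].setdefault(event_name, threading.Event())

        def pop_instance_event(self, uuid, event_name):
            # Called from the external-event handler (the "service nova" requests above).
            with self._lock:
                return self._waiters[uuid].pop(event_name, None)

    registry = ToyInstanceEvents()

    def external_instance_event(uuid, event_name):
        waiter = registry.pop_instance_event(uuid, event_name)
        if waiter is None:
            # Matches the log: "No waiting events found dispatching ..." followed by
            # the WARNING when the vif-plugged notification beats the waiter registration.
            print(f"Received unexpected event {event_name} for instance {uuid}")
        else:
            waiter.set()  # unblock the thread waiting in spawn

    # Event arriving before anyone waits takes the "unexpected event" path:
    external_instance_event("f629aa16-0442-4659-9a9c-30f10136ae84",
                            "network-vif-plugged-c6e61eaf")

During spawn the builder registers the waiter first, then creates or updates the port, then blocks on the event with a timeout; the ordering visible above (the port becomes active before any waiter exists) is why this particular WARNING is benign in this run.
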
[ 2238.274963] env[62875]: DEBUG nova.policy [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d0e175791341aea0db00ef8a1b5680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '226340868e7446cca12688a32d13c630', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2238.362098] env[62875]: DEBUG nova.network.neutron [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Successfully updated port: c6e61eaf-2f31-4315-887e-aeeb76726046 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2238.465054] env[62875]: DEBUG nova.network.neutron [-] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2238.471088] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2238.484088] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-842546a9-ec23-4f24-878e-03f78a82e1ca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.494334] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882d4553-ceeb-4a28-ad31-4a1eb667f8f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.521614] env[62875]: DEBUG nova.compute.manager [req-82c90a5c-d3dd-4523-bea0-aa4ddfa5dd08 req-8d3981c8-7075-4b22-8d85-e9fa4e51908e service nova] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Detach interface failed, port_id=de2d9317-1076-42cf-8bfa-ccba42959961, reason: Instance c4b43f9a-9c49-4281-a102-5d34f26cc9df could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2238.735333] env[62875]: DEBUG oslo_vmware.api [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136853} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2238.735952] env[62875]: DEBUG nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2238.740834] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2238.741098] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2238.741388] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2238.741674] env[62875]: INFO nova.compute.manager [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Took 1.21 seconds to destroy the instance on the hypervisor. [ 2238.742070] env[62875]: DEBUG oslo.service.loopingcall [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2238.746699] env[62875]: DEBUG nova.compute.manager [-] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2238.746874] env[62875]: DEBUG nova.network.neutron [-] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2238.804553] env[62875]: DEBUG nova.network.neutron [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Successfully created port: 0997f285-a150-4272-8178-5e00f47156d5 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2238.865235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "refresh_cache-f629aa16-0442-4659-9a9c-30f10136ae84" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2238.865235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquired lock "refresh_cache-f629aa16-0442-4659-9a9c-30f10136ae84" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2238.865235] env[62875]: DEBUG nova.network.neutron [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2238.925025] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a5684e-4a1d-4fb7-bf0d-c3b212e12ee6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.929193] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b22e38-c16b-4da6-b8af-2e20ae402d03 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.962540] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529587c4-7838-447e-8590-2aeb79d8a03b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.968078] env[62875]: INFO nova.compute.manager [-] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Took 1.28 seconds to deallocate network for instance. 
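
Every "Acquiring lock ... by ..." / "Lock ... acquired ... :: waited" / "Lock ... "released" ... :: held" triplet in this log is emitted by oslo.concurrency's lockutils, which wraps a named in-process semaphore around functions such as ResourceTracker.instance_claim; that single "compute_resources" lock is why concurrent build requests above queue up behind each other (waited 0.628s, 2.722s). A rough sketch of the serialization, simplified in that the real tracker claims against placement inventory and migration contexts rather than a flat dict (MiniResourceTracker and its dict-shaped arguments are invented for illustration):

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

    class MiniResourceTracker:
        """Illustrative only: claims, move claims and usage drops all funnel
        through one named lock, serializing every resource mutation."""

        def __init__(self, inventory):
            self.inventory = inventory                 # e.g. {"VCPU": 48, ...}
            self.used = {rc: 0 for rc in inventory}

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, requested):
            # Check first, then commit, all under the lock.
            for rc, amount in requested.items():
                if self.used[rc] + amount > self.inventory[rc]:
                    raise RuntimeError(f"insufficient {rc}")
            for rc, amount in requested.items():
                self.used[rc] += amount

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def drop_move_claim_at_source(self, requested):
            for rc, amount in requested.items():
                self.used[rc] = max(0, self.used[rc] - amount)

    tracker = MiniResourceTracker({"VCPU": 48, "MEMORY_MB": 196590, "DISK_GB": 400})
    tracker.instance_claim({"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1})  # m1.nano-sized claim

The lockutils.synchronized decorator is also the code path that produces the DEBUG lines themselves: its inner wrapper logs the acquire, the wait time, and the hold time around the wrapped call, which is what makes lock contention directly measurable from this log.
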
[ 2238.975593] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32233d2-417e-434d-8b90-eb93aa06a13f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.996171] env[62875]: DEBUG nova.compute.provider_tree [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2239.398065] env[62875]: DEBUG nova.network.neutron [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2239.482829] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.499580] env[62875]: DEBUG nova.scheduler.client.report [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2239.504259] env[62875]: DEBUG nova.network.neutron [-] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2239.523835] env[62875]: DEBUG nova.network.neutron [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Updating instance_info_cache with network_info: [{"id": "c6e61eaf-2f31-4315-887e-aeeb76726046", "address": "fa:16:3e:33:bd:b7", "network": {"id": "1c1c83a1-bff8-41d2-a1b3-f77e1fc29db4", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-595354796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bd9a8695f474c76afac0461f442839d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e61eaf-2f", "ovs_interfaceid": "c6e61eaf-2f31-4315-887e-aeeb76726046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2239.749433] env[62875]: DEBUG nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2239.773467] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2239.773719] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2239.773873] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2239.774065] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2239.774214] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2239.774358] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2239.774557] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d 
tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2239.774710] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2239.774869] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2239.775132] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2239.775224] env[62875]: DEBUG nova.virt.hardware [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2239.776107] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8245f44-ea1b-4be4-8ac6-c4fd89045285 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.784347] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1942b332-9040-4495-b8c5-9f08ccba9d15 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.004983] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2240.005527] env[62875]: DEBUG nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2240.008034] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.722s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2240.009711] env[62875]: INFO nova.compute.claims [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2240.012060] env[62875]: INFO nova.compute.manager [-] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Took 1.27 seconds to deallocate network for instance. [ 2240.025549] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Releasing lock "refresh_cache-f629aa16-0442-4659-9a9c-30f10136ae84" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2240.025858] env[62875]: DEBUG nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Instance network_info: |[{"id": "c6e61eaf-2f31-4315-887e-aeeb76726046", "address": "fa:16:3e:33:bd:b7", "network": {"id": "1c1c83a1-bff8-41d2-a1b3-f77e1fc29db4", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-595354796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bd9a8695f474c76afac0461f442839d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e61eaf-2f", "ovs_interfaceid": "c6e61eaf-2f31-4315-887e-aeeb76726046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2240.026248] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:bd:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '669e4919-e0ad-4e23-9f23-4c5f2be0d858', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6e61eaf-2f31-4315-887e-aeeb76726046', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2240.033739] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Creating folder: Project (9bd9a8695f474c76afac0461f442839d). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2240.033995] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec6bd080-09d6-43db-b81a-c813777d8bae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.045329] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Created folder: Project (9bd9a8695f474c76afac0461f442839d) in parent group-v444854. [ 2240.045513] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Creating folder: Instances. Parent ref: group-v444975. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2240.045735] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb095cba-badb-44a5-8014-11e38a227a44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.055264] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Created folder: Instances in parent group-v444975. [ 2240.055502] env[62875]: DEBUG oslo.service.loopingcall [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2240.056117] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2240.056327] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca21d315-4275-4d94-bcc2-6e61bf5f37ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.074719] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2240.074719] env[62875]: value = "task-2180483" [ 2240.074719] env[62875]: _type = "Task" [ 2240.074719] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.082431] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180483, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.225632] env[62875]: DEBUG nova.compute.manager [req-703ec14a-0c71-4ac8-9d0a-9855e7b2ce0e req-782a70fa-6ff4-4f4c-b577-02bff62ed9a2 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Received event network-vif-plugged-0997f285-a150-4272-8178-5e00f47156d5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2240.225762] env[62875]: DEBUG oslo_concurrency.lockutils [req-703ec14a-0c71-4ac8-9d0a-9855e7b2ce0e req-782a70fa-6ff4-4f4c-b577-02bff62ed9a2 service nova] Acquiring lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2240.226139] env[62875]: DEBUG oslo_concurrency.lockutils [req-703ec14a-0c71-4ac8-9d0a-9855e7b2ce0e req-782a70fa-6ff4-4f4c-b577-02bff62ed9a2 service nova] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2240.226395] env[62875]: DEBUG oslo_concurrency.lockutils [req-703ec14a-0c71-4ac8-9d0a-9855e7b2ce0e req-782a70fa-6ff4-4f4c-b577-02bff62ed9a2 service nova] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2240.226625] env[62875]: DEBUG nova.compute.manager [req-703ec14a-0c71-4ac8-9d0a-9855e7b2ce0e req-782a70fa-6ff4-4f4c-b577-02bff62ed9a2 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] No waiting events found dispatching network-vif-plugged-0997f285-a150-4272-8178-5e00f47156d5 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2240.226847] env[62875]: WARNING nova.compute.manager [req-703ec14a-0c71-4ac8-9d0a-9855e7b2ce0e req-782a70fa-6ff4-4f4c-b577-02bff62ed9a2 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Received unexpected event network-vif-plugged-0997f285-a150-4272-8178-5e00f47156d5 for instance with vm_state building and task_state spawning. [ 2240.300022] env[62875]: DEBUG nova.compute.manager [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Received event network-changed-c6e61eaf-2f31-4315-887e-aeeb76726046 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2240.300242] env[62875]: DEBUG nova.compute.manager [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Refreshing instance network info cache due to event network-changed-c6e61eaf-2f31-4315-887e-aeeb76726046. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2240.300769] env[62875]: DEBUG oslo_concurrency.lockutils [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] Acquiring lock "refresh_cache-f629aa16-0442-4659-9a9c-30f10136ae84" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2240.300913] env[62875]: DEBUG oslo_concurrency.lockutils [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] Acquired lock "refresh_cache-f629aa16-0442-4659-9a9c-30f10136ae84" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2240.301084] env[62875]: DEBUG nova.network.neutron [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Refreshing network info cache for port c6e61eaf-2f31-4315-887e-aeeb76726046 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2240.372333] env[62875]: DEBUG nova.network.neutron [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Successfully updated port: 0997f285-a150-4272-8178-5e00f47156d5 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2240.515395] env[62875]: DEBUG nova.compute.utils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2240.519562] env[62875]: DEBUG nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2240.519562] env[62875]: DEBUG nova.network.neutron [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2240.520805] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2240.568146] env[62875]: DEBUG nova.policy [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f23fcdbba3446caa2896dd7017d8112', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7df08f1e384b414bb0820979688a2a4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2240.584570] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180483, 'name': CreateVM_Task, 'duration_secs': 0.380336} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2240.584722] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2240.585364] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2240.585521] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2240.585833] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2240.586075] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ddad17f-df7e-4546-8f36-884bad87a7fd {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.590252] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2240.590252] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e6a724-c10e-a209-827b-555a332a2c24" [ 2240.590252] env[62875]: _type = "Task" [ 2240.590252] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.597529] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e6a724-c10e-a209-827b-555a332a2c24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.876561] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "refresh_cache-36c909f2-5d06-4a3e-ace2-15d2e36b4a95" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2240.876843] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "refresh_cache-36c909f2-5d06-4a3e-ace2-15d2e36b4a95" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2240.876843] env[62875]: DEBUG nova.network.neutron [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2240.902717] env[62875]: DEBUG nova.network.neutron [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Successfully created port: 37efe56a-09a3-4de0-ad7e-7c89ebad0afc {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2241.000144] env[62875]: DEBUG nova.network.neutron [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Updated VIF entry in instance network info cache for port c6e61eaf-2f31-4315-887e-aeeb76726046. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2241.000591] env[62875]: DEBUG nova.network.neutron [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Updating instance_info_cache with network_info: [{"id": "c6e61eaf-2f31-4315-887e-aeeb76726046", "address": "fa:16:3e:33:bd:b7", "network": {"id": "1c1c83a1-bff8-41d2-a1b3-f77e1fc29db4", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-595354796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bd9a8695f474c76afac0461f442839d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e61eaf-2f", "ovs_interfaceid": "c6e61eaf-2f31-4315-887e-aeeb76726046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.019661] env[62875]: DEBUG nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2241.100915] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e6a724-c10e-a209-827b-555a332a2c24, 'name': SearchDatastore_Task, 'duration_secs': 0.008662} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.101254] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2241.101467] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2241.101698] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2241.101844] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2241.102053] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2241.104237] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a03354b8-ea6e-4fb4-aab5-15d4e7732854 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.112546] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2241.112752] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2241.113500] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb38b270-5773-4cce-8afc-f5d36c61c672 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.120715] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2241.120715] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52130a30-b9cb-5f80-7d99-909328553ad0" [ 2241.120715] env[62875]: _type = "Task" [ 2241.120715] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.128457] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52130a30-b9cb-5f80-7d99-909328553ad0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.190031] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7719da-3f19-4264-b366-88323691a09b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.196909] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd81f3f5-e709-4a8f-83c4-cf4eb4a21c38 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.228044] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2228fc-ce37-4d95-91b8-0b0b04524aff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.235199] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68adb48f-5df8-4433-a36c-d4e418d4f0e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.248475] env[62875]: DEBUG nova.compute.provider_tree [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2241.407635] env[62875]: DEBUG nova.network.neutron [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2241.504652] env[62875]: DEBUG oslo_concurrency.lockutils [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] Releasing lock "refresh_cache-f629aa16-0442-4659-9a9c-30f10136ae84" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2241.504931] env[62875]: DEBUG nova.compute.manager [req-46faef47-1b83-4db7-bf5c-123fa46e6d1c req-2ec92f98-b9a6-4af4-b4d3-709dcfb3515b service nova] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Received event network-vif-deleted-acf0849a-ab7f-4949-951d-8268baee804c {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2241.536018] env[62875]: DEBUG nova.network.neutron [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Updating instance_info_cache with network_info: [{"id": "0997f285-a150-4272-8178-5e00f47156d5", "address": "fa:16:3e:fd:3e:d7", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0997f285-a1", "ovs_interfaceid": "0997f285-a150-4272-8178-5e00f47156d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.631282] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52130a30-b9cb-5f80-7d99-909328553ad0, 'name': SearchDatastore_Task, 'duration_secs': 0.008297} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.632076] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c566ede0-ab86-48b8-8d42-82f3a059f813 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.637318] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2241.637318] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52125aa0-55c4-345d-8b32-19b053ff13b9" [ 2241.637318] env[62875]: _type = "Task" [ 2241.637318] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.644404] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52125aa0-55c4-345d-8b32-19b053ff13b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.751298] env[62875]: DEBUG nova.scheduler.client.report [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2242.030151] env[62875]: DEBUG nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2242.038720] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "refresh_cache-36c909f2-5d06-4a3e-ace2-15d2e36b4a95" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2242.039167] env[62875]: DEBUG nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Instance network_info: |[{"id": "0997f285-a150-4272-8178-5e00f47156d5", "address": "fa:16:3e:fd:3e:d7", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0997f285-a1", "ovs_interfaceid": "0997f285-a150-4272-8178-5e00f47156d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2242.039485] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:3e:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0997f285-a150-4272-8178-5e00f47156d5', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2242.047081] env[62875]: DEBUG oslo.service.loopingcall [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2242.047301] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2242.049319] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bd46c37-e389-44fe-a637-af59bb0875d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.069562] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2242.069783] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2242.070155] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2242.070155] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
2242.070273] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2242.070399] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2242.070595] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2242.071476] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2242.071476] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2242.071476] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2242.071476] env[62875]: DEBUG nova.virt.hardware [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2242.072093] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79082e92-6e60-474c-b78c-be58b08bef5f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.075629] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2242.075629] env[62875]: value = "task-2180484" [ 2242.075629] env[62875]: _type = "Task" [ 2242.075629] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.082359] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca29d9c-449e-46be-a801-809eee653026 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.088970] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180484, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.149310] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52125aa0-55c4-345d-8b32-19b053ff13b9, 'name': SearchDatastore_Task, 'duration_secs': 0.008963} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.149737] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2242.150165] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] f629aa16-0442-4659-9a9c-30f10136ae84/f629aa16-0442-4659-9a9c-30f10136ae84.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2242.150539] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53fe190e-a57e-486c-a7e3-17c0ec5dea5a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.158455] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2242.158455] env[62875]: value = "task-2180485" [ 2242.158455] env[62875]: _type = "Task" [ 2242.158455] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.165983] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.255952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2242.256472] env[62875]: DEBUG nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2242.259986] env[62875]: DEBUG nova.compute.manager [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Received event network-changed-0997f285-a150-4272-8178-5e00f47156d5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2242.260344] env[62875]: DEBUG nova.compute.manager [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Refreshing instance network info cache due to event network-changed-0997f285-a150-4272-8178-5e00f47156d5. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2242.260609] env[62875]: DEBUG oslo_concurrency.lockutils [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] Acquiring lock "refresh_cache-36c909f2-5d06-4a3e-ace2-15d2e36b4a95" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2242.260788] env[62875]: DEBUG oslo_concurrency.lockutils [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] Acquired lock "refresh_cache-36c909f2-5d06-4a3e-ace2-15d2e36b4a95" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2242.260986] env[62875]: DEBUG nova.network.neutron [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Refreshing network info cache for port 0997f285-a150-4272-8178-5e00f47156d5 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2242.262285] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.791s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2242.265457] env[62875]: INFO nova.compute.claims [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2242.449159] env[62875]: DEBUG nova.network.neutron [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Successfully updated port: 37efe56a-09a3-4de0-ad7e-7c89ebad0afc {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2242.586652] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180484, 'name': CreateVM_Task, 'duration_secs': 0.443056} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.586843] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2242.587574] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2242.587752] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2242.588100] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2242.588375] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c089aa9-ba71-4f11-89fe-06678032d6ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.593771] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2242.593771] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527eafbc-fc49-d1e9-fad0-c75c9f19e82c" [ 2242.593771] env[62875]: _type = "Task" [ 2242.593771] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.602505] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527eafbc-fc49-d1e9-fad0-c75c9f19e82c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.667551] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467588} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.667799] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] f629aa16-0442-4659-9a9c-30f10136ae84/f629aa16-0442-4659-9a9c-30f10136ae84.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2242.668015] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2242.668328] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1910469-a571-4b84-8e1e-7960cce185b7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.674096] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2242.674096] env[62875]: value = "task-2180486" [ 2242.674096] env[62875]: _type = "Task" [ 2242.674096] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.680955] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.763945] env[62875]: DEBUG nova.compute.utils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2242.765475] env[62875]: DEBUG nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2242.765584] env[62875]: DEBUG nova.network.neutron [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2242.823477] env[62875]: DEBUG nova.policy [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f23fcdbba3446caa2896dd7017d8112', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7df08f1e384b414bb0820979688a2a4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2242.952062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "refresh_cache-9b3628be-b8a3-4105-bc84-088dede23aaf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2242.952212] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired lock "refresh_cache-9b3628be-b8a3-4105-bc84-088dede23aaf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2242.952268] env[62875]: DEBUG nova.network.neutron [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2242.960604] env[62875]: DEBUG nova.network.neutron [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Updated VIF entry in instance network info cache for port 0997f285-a150-4272-8178-5e00f47156d5. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2242.961060] env[62875]: DEBUG nova.network.neutron [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Updating instance_info_cache with network_info: [{"id": "0997f285-a150-4272-8178-5e00f47156d5", "address": "fa:16:3e:fd:3e:d7", "network": {"id": "3a4a6f31-0ede-476e-b050-f6a06bfca4d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-351674143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "226340868e7446cca12688a32d13c630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0997f285-a1", "ovs_interfaceid": "0997f285-a150-4272-8178-5e00f47156d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2243.051712] env[62875]: DEBUG nova.network.neutron [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Successfully created port: edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2243.105781] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527eafbc-fc49-d1e9-fad0-c75c9f19e82c, 'name': SearchDatastore_Task, 'duration_secs': 0.018348} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.106101] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2243.106336] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2243.106567] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2243.106735] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2243.106930] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2243.107208] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3810eb3-431e-4c68-81e8-bdb2b35b6778 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.115479] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2243.115661] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Folder [datastore2] devstack-image-cache_base created. 
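The Acquiring/Acquired/Releasing triplets around the image-cache vmdk come from oslo.concurrency's named-lock helper: every build that wants the same cached image serializes on the datastore path. The shape of that pattern, as a sketch:

```python
from oslo_concurrency import lockutils

lock_name = ('[datastore2] devstack-image-cache_base/'
             'a9637bcc-4de8-4ea1-be59-4c697becf2a7/'
             'a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk')

# In-process lock, matching the lockutils.py:310/313/331 records above;
# external=True would take a file lock across processes instead.
with lockutils.lock(lock_name):
    pass  # fetch-if-missing / copy the cached image here
```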
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2243.116413] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d3fb11-1e9c-45aa-8bc3-ea8ef341d75a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.123248] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2243.123248] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52928ec5-c191-9195-afb7-0104277fd96b" [ 2243.123248] env[62875]: _type = "Task" [ 2243.123248] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.130864] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52928ec5-c191-9195-afb7-0104277fd96b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.183236] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059272} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.183583] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2243.184348] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a631847-0eb1-4e3a-94a1-883df32b7898 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.207532] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] f629aa16-0442-4659-9a9c-30f10136ae84/f629aa16-0442-4659-9a9c-30f10136ae84.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2243.207532] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8678baa-f6d3-4644-8961-8ff8c67ed2e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.226817] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2243.226817] env[62875]: value = "task-2180487" [ 2243.226817] env[62875]: _type = "Task" [ 2243.226817] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.234829] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180487, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.270756] env[62875]: DEBUG nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2243.427890] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c503f70d-d82d-4b23-a3c4-968f975dede6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.435586] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1a6506-0bd8-45c8-a439-ac3a21885047 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.472072] env[62875]: DEBUG oslo_concurrency.lockutils [req-727869ae-2785-46aa-98e8-c11ffde09f90 req-a612a173-8205-4784-afbd-d5805073ce09 service nova] Releasing lock "refresh_cache-36c909f2-5d06-4a3e-ace2-15d2e36b4a95" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2243.472572] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9945ba-72f7-40a8-9033-cf15b90b25ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.480305] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560f0c2a-7871-4fcb-a5a0-b0ef548ac388 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.494161] env[62875]: DEBUG nova.compute.provider_tree [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2243.501749] env[62875]: DEBUG nova.network.neutron [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2243.634039] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52928ec5-c191-9195-afb7-0104277fd96b, 'name': SearchDatastore_Task, 'duration_secs': 0.007893} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.634966] env[62875]: DEBUG nova.network.neutron [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Updating instance_info_cache with network_info: [{"id": "37efe56a-09a3-4de0-ad7e-7c89ebad0afc", "address": "fa:16:3e:06:d6:d5", "network": {"id": "584b981c-d3be-4574-af88-35d1c69852c4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "095aa2361c8c47a1b4891b36019a4780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37efe56a-09", "ovs_interfaceid": "37efe56a-09a3-4de0-ad7e-7c89ebad0afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2243.637140] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-069819ee-63e8-4a15-9c83-7a744ef27c4b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.642515] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2243.642515] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520693b0-b846-f801-705b-f61d293262bd" [ 2243.642515] env[62875]: _type = "Task" [ 2243.642515] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.650977] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520693b0-b846-f801-705b-f61d293262bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.739529] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180487, 'name': ReconfigVM_Task, 'duration_secs': 0.275433} completed successfully. 
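The Reconfigure-to-attach-disk step that just completed is a single ReconfigVM_Task whose config spec adds a VirtualDisk backed by the copied .vmdk. A hedged sketch of the equivalent call using pyVmomi rather than Nova's suds-based oslo.vmware client (names and defaults here are illustrative, not Nova's code):

```python
from pyVmomi import vim

def attach_existing_vmdk(vm, datastore_path, controller_key=1000, unit=0):
    """Attach an existing .vmdk (e.g. '[datastore1] x/x.vmdk') to vm."""
    backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
    backing.fileName = datastore_path
    backing.diskMode = 'persistent'

    disk = vim.vm.device.VirtualDisk()
    disk.controllerKey = controller_key   # assumed SCSI controller key
    disk.unitNumber = unit
    disk.backing = backing

    change = vim.vm.device.VirtualDeviceSpec()
    change.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
    change.device = disk

    spec = vim.vm.ConfigSpec(deviceChange=[change])
    return vm.ReconfigVM_Task(spec=spec)  # poll it like the tasks above
```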
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.739864] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Reconfigured VM instance instance-00000053 to attach disk [datastore1] f629aa16-0442-4659-9a9c-30f10136ae84/f629aa16-0442-4659-9a9c-30f10136ae84.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2243.740578] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c6f1510-7dd7-400d-b0a2-12a22b63e1d4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.747390] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2243.747390] env[62875]: value = "task-2180488" [ 2243.747390] env[62875]: _type = "Task" [ 2243.747390] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.756071] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180488, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.998027] env[62875]: DEBUG nova.scheduler.client.report [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2244.137927] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Releasing lock "refresh_cache-9b3628be-b8a3-4105-bc84-088dede23aaf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2244.138293] env[62875]: DEBUG nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Instance network_info: |[{"id": "37efe56a-09a3-4de0-ad7e-7c89ebad0afc", "address": "fa:16:3e:06:d6:d5", "network": {"id": "584b981c-d3be-4574-af88-35d1c69852c4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "095aa2361c8c47a1b4891b36019a4780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37efe56a-09", "ovs_interfaceid": "37efe56a-09a3-4de0-ad7e-7c89ebad0afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2244.138729] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:d6:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37efe56a-09a3-4de0-ad7e-7c89ebad0afc', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2244.146305] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Creating folder: Project (7df08f1e384b414bb0820979688a2a4f). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2244.146947] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2bcdb30-e820-4cf8-9a2e-d3134d2992a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.157664] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520693b0-b846-f801-705b-f61d293262bd, 'name': SearchDatastore_Task, 'duration_secs': 0.080108} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.157895] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2244.159038] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 36c909f2-5d06-4a3e-ace2-15d2e36b4a95/36c909f2-5d06-4a3e-ace2-15d2e36b4a95.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2244.159038] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4db2ceb2-11c2-4ee2-b5d8-e939f0cdddaa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.161098] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Created folder: Project (7df08f1e384b414bb0820979688a2a4f) in parent group-v444854. [ 2244.161277] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Creating folder: Instances. Parent ref: group-v444979. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2244.161774] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78aafe44-cbfe-43b1-a76c-a0da8ed1b04f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.165623] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2244.165623] env[62875]: value = "task-2180490" [ 2244.165623] env[62875]: _type = "Task" [ 2244.165623] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.170797] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Created folder: Instances in parent group-v444979. [ 2244.171032] env[62875]: DEBUG oslo.service.loopingcall [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
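The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" record is oslo.service's FixedIntervalLoopingCall at work, the same primitive behind every "progress is N%" poll in this log. A simplified, runnable sketch of that wait loop (poll_fn stands in for the real vSphere task query):

```python
from oslo_service import loopingcall

def wait_for_task(poll_fn, interval=0.5):
    """Poll until poll_fn reports success; simplified wait_for_task shape."""
    def _poll():
        state, result = poll_fn()
        if state == 'success':
            raise loopingcall.LoopingCallDone(result)
        if state == 'error':
            raise RuntimeError(result)
        # 'queued'/'running' fall through and the loop fires again

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()

# Toy usage: succeeds on the third poll.
states = iter([('running', '0%'), ('running', '50%'), ('success', 'done')])
print(wait_for_task(lambda: next(states), interval=0.01))  # -> done
```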
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2244.173786] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2244.174035] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180490, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.174234] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bb04667-71f1-460e-808b-9e68334df916 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.192716] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2244.192716] env[62875]: value = "task-2180492" [ 2244.192716] env[62875]: _type = "Task" [ 2244.192716] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.200086] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180492, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.258968] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180488, 'name': Rename_Task, 'duration_secs': 0.138552} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.258968] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2244.259223] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7046f234-94ff-4cff-97a1-9127d0ee5b98 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.265948] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2244.265948] env[62875]: value = "task-2180493" [ 2244.265948] env[62875]: _type = "Task" [ 2244.265948] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.273877] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180493, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.281695] env[62875]: DEBUG nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2244.286357] env[62875]: DEBUG nova.compute.manager [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Received event network-vif-plugged-37efe56a-09a3-4de0-ad7e-7c89ebad0afc {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2244.286595] env[62875]: DEBUG oslo_concurrency.lockutils [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] Acquiring lock "9b3628be-b8a3-4105-bc84-088dede23aaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.286811] env[62875]: DEBUG oslo_concurrency.lockutils [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.286981] env[62875]: DEBUG oslo_concurrency.lockutils [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.287168] env[62875]: DEBUG nova.compute.manager [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] No waiting events found dispatching network-vif-plugged-37efe56a-09a3-4de0-ad7e-7c89ebad0afc {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2244.287376] env[62875]: WARNING nova.compute.manager [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Received unexpected event network-vif-plugged-37efe56a-09a3-4de0-ad7e-7c89ebad0afc for instance with vm_state building and task_state spawning. [ 2244.287485] env[62875]: DEBUG nova.compute.manager [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Received event network-changed-37efe56a-09a3-4de0-ad7e-7c89ebad0afc {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2244.287634] env[62875]: DEBUG nova.compute.manager [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Refreshing instance network info cache due to event network-changed-37efe56a-09a3-4de0-ad7e-7c89ebad0afc. 
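The network-vif-plugged warning above is the benign race the surrounding records describe: the event from Neutron arrived before the spawning thread registered a waiter for it, so pop_instance_event finds nothing and the manager logs "Received unexpected event" instead of dispatching. A toy sketch of that register-then-pop pattern (assumed names, not Nova's InstanceEvents):

```python
import threading

class EventRegistry:
    """Waiters register interest first; callbacks pop and signal them."""
    def __init__(self):
        self._events = {}
        self._lock = threading.Lock()

    def prepare(self, name):
        with self._lock:
            return self._events.setdefault(name, threading.Event())

    def pop(self, name):
        with self._lock:
            ev = self._events.pop(name, None)
        if ev is None:
            print(f'unexpected event {name}')  # cf. the WARNING above
        else:
            ev.set()

registry = EventRegistry()
# Event arrives before anyone prepared for it -> the warning path.
registry.pop('network-vif-plugged-37efe56a')
```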
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2244.287845] env[62875]: DEBUG oslo_concurrency.lockutils [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] Acquiring lock "refresh_cache-9b3628be-b8a3-4105-bc84-088dede23aaf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2244.287945] env[62875]: DEBUG oslo_concurrency.lockutils [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] Acquired lock "refresh_cache-9b3628be-b8a3-4105-bc84-088dede23aaf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2244.288113] env[62875]: DEBUG nova.network.neutron [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Refreshing network info cache for port 37efe56a-09a3-4de0-ad7e-7c89ebad0afc {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2244.309610] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2244.309852] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2244.310024] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2244.310324] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2244.310390] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2244.310503] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c 
tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2244.310705] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2244.310861] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2244.311046] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2244.311267] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2244.311480] env[62875]: DEBUG nova.virt.hardware [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2244.312376] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab76e2f-7fee-464d-b739-a8848a156a2a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.320802] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79399ff6-81e0-435b-b73e-e9d973185718 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.503624] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.241s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.504056] env[62875]: DEBUG nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Start building networks asynchronously for instance. 
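The CPU-topology records above walk from flavor/image limits (none set, so 65536 everywhere) down to the single valid factoring of one vCPU. The enumeration step reduces to finding (sockets, cores, threads) triples whose product equals the vCPU count; a simplified sketch of just that step (Nova's real ordering and preference logic lives in nova/virt/hardware.py):

```python
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) factorings of vcpus within limits."""
    for s, c, t in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matching the log
```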
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2244.506735] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.024s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.506973] env[62875]: DEBUG nova.objects.instance [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'resources' on Instance uuid c4b43f9a-9c49-4281-a102-5d34f26cc9df {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2244.610681] env[62875]: DEBUG nova.compute.manager [req-49a36267-ed78-406a-8729-01a3a9ffedaa req-82f2c00a-e9ff-4c5f-aff8-cea09b94b54b service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Received event network-vif-plugged-edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2244.610918] env[62875]: DEBUG oslo_concurrency.lockutils [req-49a36267-ed78-406a-8729-01a3a9ffedaa req-82f2c00a-e9ff-4c5f-aff8-cea09b94b54b service nova] Acquiring lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.611146] env[62875]: DEBUG oslo_concurrency.lockutils [req-49a36267-ed78-406a-8729-01a3a9ffedaa req-82f2c00a-e9ff-4c5f-aff8-cea09b94b54b service nova] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.611317] env[62875]: DEBUG oslo_concurrency.lockutils [req-49a36267-ed78-406a-8729-01a3a9ffedaa req-82f2c00a-e9ff-4c5f-aff8-cea09b94b54b service nova] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.611483] env[62875]: DEBUG nova.compute.manager [req-49a36267-ed78-406a-8729-01a3a9ffedaa req-82f2c00a-e9ff-4c5f-aff8-cea09b94b54b service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] No waiting events found dispatching network-vif-plugged-edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2244.611648] env[62875]: WARNING nova.compute.manager [req-49a36267-ed78-406a-8729-01a3a9ffedaa req-82f2c00a-e9ff-4c5f-aff8-cea09b94b54b service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Received unexpected event network-vif-plugged-edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 for instance with vm_state building and task_state spawning. [ 2244.677590] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180490, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.702038] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180492, 'name': CreateVM_Task, 'duration_secs': 0.499785} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.702233] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2244.702928] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2244.703120] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2244.703447] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2244.703704] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b333af6e-cebe-4094-a056-8467c2cec2e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.708278] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2244.708278] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5250bdfb-ad4f-f699-b7e2-5f37db29d197" [ 2244.708278] env[62875]: _type = "Task" [ 2244.708278] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.716453] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5250bdfb-ad4f-f699-b7e2-5f37db29d197, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.775823] env[62875]: DEBUG oslo_vmware.api [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180493, 'name': PowerOnVM_Task, 'duration_secs': 0.459167} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.776108] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2244.776317] env[62875]: INFO nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Took 11.21 seconds to spawn the instance on the hypervisor. [ 2244.776495] env[62875]: DEBUG nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2244.777291] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868f604c-d0b1-4873-ad2e-54312b74e28d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.980025] env[62875]: DEBUG nova.network.neutron [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Updated VIF entry in instance network info cache for port 37efe56a-09a3-4de0-ad7e-7c89ebad0afc. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2244.980418] env[62875]: DEBUG nova.network.neutron [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Updating instance_info_cache with network_info: [{"id": "37efe56a-09a3-4de0-ad7e-7c89ebad0afc", "address": "fa:16:3e:06:d6:d5", "network": {"id": "584b981c-d3be-4574-af88-35d1c69852c4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "095aa2361c8c47a1b4891b36019a4780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37efe56a-09", "ovs_interfaceid": "37efe56a-09a3-4de0-ad7e-7c89ebad0afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.009960] env[62875]: DEBUG nova.compute.utils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 2245.013801] env[62875]: DEBUG nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2245.013967] env[62875]: DEBUG nova.network.neutron [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2245.051792] env[62875]: DEBUG nova.policy [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e818b6d10af42bb9c86e79ae93de507', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7523e34b88d84ec1ae28221d8d1a3591', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2245.148937] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f8cb42-b23b-483f-bde7-d272122f2e5b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.156461] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82daf2c5-f5c8-4279-9732-ed56496b4223 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.187350] env[62875]: DEBUG nova.network.neutron [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Successfully updated port: edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2245.191772] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e782fb7c-73c6-4a6e-b43c-5e13397fdb2d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.200116] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180490, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52158} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.202805] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 36c909f2-5d06-4a3e-ace2-15d2e36b4a95/36c909f2-5d06-4a3e-ace2-15d2e36b4a95.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2245.203045] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2245.203635] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee2e5120-b4de-4521-ab9f-89ae5eeaef15 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.206343] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dec697d-528d-44d3-9982-8c56ecb89ca0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.215439] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2245.215439] env[62875]: value = "task-2180494" [ 2245.215439] env[62875]: _type = "Task" [ 2245.215439] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.226151] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5250bdfb-ad4f-f699-b7e2-5f37db29d197, 'name': SearchDatastore_Task, 'duration_secs': 0.010024} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.226846] env[62875]: DEBUG nova.compute.provider_tree [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2245.231870] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.231870] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2245.231870] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.231870] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.231870] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2245.232260] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b61a175a-480a-4974-8bf6-6c3cd1d37b0d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.239301] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180494, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.240327] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2245.240576] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2245.241157] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e75842e8-d64d-455f-b8cd-0d3118f00b85 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.245914] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2245.245914] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb4a8c-c19e-2788-4433-4e9ec7980e0b" [ 2245.245914] env[62875]: _type = "Task" [ 2245.245914] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.253490] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb4a8c-c19e-2788-4433-4e9ec7980e0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.295656] env[62875]: INFO nova.compute.manager [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Took 18.42 seconds to build instance. [ 2245.344260] env[62875]: DEBUG nova.network.neutron [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Successfully created port: b827e10c-2333-47ac-b3df-d384edbf2261 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2245.483131] env[62875]: DEBUG oslo_concurrency.lockutils [req-6f4fc1b8-48bf-408f-96fa-fab27b42d2ba req-e28efcfd-f209-49e1-9310-983d8199b8ff service nova] Releasing lock "refresh_cache-9b3628be-b8a3-4105-bc84-088dede23aaf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.515131] env[62875]: DEBUG nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Start building block device mappings for instance. 
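The "Successfully created port" records in this stretch correspond to the minimal port create Nova performs before binding details are filled in. A hedged sketch of the equivalent call through openstacksdk rather than Nova's internal Neutron client (the 'devstack' cloud entry is an assumed clouds.yaml name):

```python
import openstack

conn = openstack.connect(cloud='devstack')  # assumed clouds.yaml entry

# Minimal port on the shared network from the cache payloads above;
# Neutron assigns the fixed IP and MAC, binding happens later.
port = conn.network.create_port(
    network_id='584b981c-d3be-4574-af88-35d1c69852c4',
    device_owner='compute:nova')
print(port.id)
```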
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2245.695774] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "refresh_cache-d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.695963] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired lock "refresh_cache-d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.696142] env[62875]: DEBUG nova.network.neutron [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2245.737085] env[62875]: DEBUG nova.scheduler.client.report [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2245.740463] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180494, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064818} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.741096] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2245.742301] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc3ed0c-11e2-4adf-9279-2c55a1ed2c01 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.768932] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 36c909f2-5d06-4a3e-ace2-15d2e36b4a95/36c909f2-5d06-4a3e-ace2-15d2e36b4a95.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2245.770287] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48750439-2000-44ff-a98f-ee1267e2d3e7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.788150] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb4a8c-c19e-2788-4433-4e9ec7980e0b, 'name': SearchDatastore_Task, 'duration_secs': 0.008381} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.789360] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d32e9496-6c5d-490a-b6a1-82293be3dec6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.793164] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2245.793164] env[62875]: value = "task-2180495" [ 2245.793164] env[62875]: _type = "Task" [ 2245.793164] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.798570] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2245.798570] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c9e9c7-e889-9638-e731-50b8f15d812b" [ 2245.798570] env[62875]: _type = "Task" [ 2245.798570] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.801024] env[62875]: DEBUG oslo_concurrency.lockutils [None req-36066b24-35c4-4429-82eb-177470576a30 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "f629aa16-0442-4659-9a9c-30f10136ae84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.936s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.808133] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180495, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.814101] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c9e9c7-e889-9638-e731-50b8f15d812b, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.814350] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.814599] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 9b3628be-b8a3-4105-bc84-088dede23aaf/9b3628be-b8a3-4105-bc84-088dede23aaf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2245.814845] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-944177ac-207b-4bcb-9fcc-7f9d6eaa045b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.822399] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2245.822399] env[62875]: value = "task-2180496" [ 2245.822399] env[62875]: _type = "Task" [ 2245.822399] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.829988] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180496, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.233632] env[62875]: DEBUG nova.network.neutron [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2246.246676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.249059] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.728s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.249296] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.265782] env[62875]: INFO nova.scheduler.client.report [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted allocations for instance c4b43f9a-9c49-4281-a102-5d34f26cc9df [ 2246.271622] env[62875]: INFO nova.scheduler.client.report [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted allocations for instance 2a16938e-eeaa-430d-961b-4b060187ba99 [ 2246.305026] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "f629aa16-0442-4659-9a9c-30f10136ae84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.305336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "f629aa16-0442-4659-9a9c-30f10136ae84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.305557] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "f629aa16-0442-4659-9a9c-30f10136ae84-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.305753] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "f629aa16-0442-4659-9a9c-30f10136ae84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.305939] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "f629aa16-0442-4659-9a9c-30f10136ae84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.307643] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180495, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.308375] env[62875]: INFO nova.compute.manager [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Terminating instance [ 2246.311142] env[62875]: DEBUG nova.compute.manager [req-92d6322c-314b-45e4-adf2-deb76208b074 req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Received event network-changed-edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2246.311335] env[62875]: DEBUG nova.compute.manager [req-92d6322c-314b-45e4-adf2-deb76208b074 req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Refreshing instance network info cache due to event network-changed-edf1649b-cbe3-413d-b8ea-f4c89cd08fc5. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2246.311538] env[62875]: DEBUG oslo_concurrency.lockutils [req-92d6322c-314b-45e4-adf2-deb76208b074 req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] Acquiring lock "refresh_cache-d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2246.333700] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48067} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.334019] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 9b3628be-b8a3-4105-bc84-088dede23aaf/9b3628be-b8a3-4105-bc84-088dede23aaf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2246.334277] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2246.334578] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1abe088e-f484-47f6-a8ad-09d1c657e6c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.340981] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2246.340981] env[62875]: value = "task-2180497" [ 2246.340981] env[62875]: _type = "Task" [ 2246.340981] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.349407] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180497, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.390069] env[62875]: DEBUG nova.network.neutron [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Updating instance_info_cache with network_info: [{"id": "edf1649b-cbe3-413d-b8ea-f4c89cd08fc5", "address": "fa:16:3e:8c:67:21", "network": {"id": "584b981c-d3be-4574-af88-35d1c69852c4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "095aa2361c8c47a1b4891b36019a4780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf1649b-cb", "ovs_interfaceid": "edf1649b-cbe3-413d-b8ea-f4c89cd08fc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.525186] env[62875]: DEBUG nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2246.550798] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2246.551052] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2246.551218] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2246.551401] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2246.551548] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2246.551692] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2246.551897] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2246.552089] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2246.552681] 
env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2246.552681] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2246.552681] env[62875]: DEBUG nova.virt.hardware [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2246.553518] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee93a174-849a-4526-a246-2781b0fc9a2c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.563387] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e410228-0775-4f6d-92b4-a4c9ca9f978b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.725153] env[62875]: DEBUG nova.compute.manager [req-88ebf773-f1b5-4200-92be-10448b859844 req-cd983c6d-aec4-486c-9d40-56a62b4084be service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Received event network-vif-plugged-b827e10c-2333-47ac-b3df-d384edbf2261 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2246.725401] env[62875]: DEBUG oslo_concurrency.lockutils [req-88ebf773-f1b5-4200-92be-10448b859844 req-cd983c6d-aec4-486c-9d40-56a62b4084be service nova] Acquiring lock "3458fc2b-a241-4492-9b65-f89b955b8c0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.725620] env[62875]: DEBUG oslo_concurrency.lockutils [req-88ebf773-f1b5-4200-92be-10448b859844 req-cd983c6d-aec4-486c-9d40-56a62b4084be service nova] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.725792] env[62875]: DEBUG oslo_concurrency.lockutils [req-88ebf773-f1b5-4200-92be-10448b859844 req-cd983c6d-aec4-486c-9d40-56a62b4084be service nova] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.725963] env[62875]: DEBUG nova.compute.manager [req-88ebf773-f1b5-4200-92be-10448b859844 req-cd983c6d-aec4-486c-9d40-56a62b4084be service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] No waiting events found dispatching network-vif-plugged-b827e10c-2333-47ac-b3df-d384edbf2261 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2246.726304] env[62875]: WARNING 
nova.compute.manager [req-88ebf773-f1b5-4200-92be-10448b859844 req-cd983c6d-aec4-486c-9d40-56a62b4084be service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Received unexpected event network-vif-plugged-b827e10c-2333-47ac-b3df-d384edbf2261 for instance with vm_state building and task_state spawning. [ 2246.780579] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ff736268-718e-474f-b1a6-af16d2141ccf tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "2a16938e-eeaa-430d-961b-4b060187ba99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.754s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.781687] env[62875]: DEBUG oslo_concurrency.lockutils [None req-95b3a542-4eae-4d63-9b8b-d08badf46762 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "c4b43f9a-9c49-4281-a102-5d34f26cc9df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.161s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.805683] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180495, 'name': ReconfigVM_Task, 'duration_secs': 0.550456} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.805864] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 36c909f2-5d06-4a3e-ace2-15d2e36b4a95/36c909f2-5d06-4a3e-ace2-15d2e36b4a95.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2246.806485] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a204725-3fc1-419d-bf66-3581ef1a365d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.812868] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2246.812868] env[62875]: value = "task-2180498" [ 2246.812868] env[62875]: _type = "Task" [ 2246.812868] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.813474] env[62875]: DEBUG nova.compute.manager [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2246.813753] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2246.818067] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e6f7c6-3dd8-4d5c-90d8-1a1bdf25ac7e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.826090] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2246.829389] env[62875]: DEBUG nova.network.neutron [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Successfully updated port: b827e10c-2333-47ac-b3df-d384edbf2261 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2246.830489] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e9a19f6-bf57-4e86-8417-04f5fa2e4ea5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.831984] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180498, 'name': Rename_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.837045] env[62875]: DEBUG oslo_vmware.api [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2246.837045] env[62875]: value = "task-2180499" [ 2246.837045] env[62875]: _type = "Task" [ 2246.837045] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.850640] env[62875]: DEBUG oslo_vmware.api [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180499, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.856154] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180497, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064134} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.856604] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2246.857485] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b7584c-eab8-4c3e-99cd-28260a817a7b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.880010] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 9b3628be-b8a3-4105-bc84-088dede23aaf/9b3628be-b8a3-4105-bc84-088dede23aaf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2246.880307] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11ff217a-4ffd-41dc-a00b-5184fa7eac54 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.895579] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Releasing lock "refresh_cache-d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2246.895896] env[62875]: DEBUG nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Instance network_info: |[{"id": "edf1649b-cbe3-413d-b8ea-f4c89cd08fc5", "address": "fa:16:3e:8c:67:21", "network": {"id": "584b981c-d3be-4574-af88-35d1c69852c4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "095aa2361c8c47a1b4891b36019a4780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf1649b-cb", "ovs_interfaceid": "edf1649b-cbe3-413d-b8ea-f4c89cd08fc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2246.896213] env[62875]: DEBUG oslo_concurrency.lockutils [req-92d6322c-314b-45e4-adf2-deb76208b074 
req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] Acquired lock "refresh_cache-d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2246.896387] env[62875]: DEBUG nova.network.neutron [req-92d6322c-314b-45e4-adf2-deb76208b074 req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Refreshing network info cache for port edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2246.897520] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:67:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'edf1649b-cbe3-413d-b8ea-f4c89cd08fc5', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2246.905149] env[62875]: DEBUG oslo.service.loopingcall [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2246.908495] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2246.909236] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8683ea1d-3f11-4feb-9582-3992cedb3c08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.924956] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2246.924956] env[62875]: value = "task-2180500" [ 2246.924956] env[62875]: _type = "Task" [ 2246.924956] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.930131] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2246.930131] env[62875]: value = "task-2180501" [ 2246.930131] env[62875]: _type = "Task" [ 2246.930131] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.936294] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.941824] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180501, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.138999] env[62875]: DEBUG nova.network.neutron [req-92d6322c-314b-45e4-adf2-deb76208b074 req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Updated VIF entry in instance network info cache for port edf1649b-cbe3-413d-b8ea-f4c89cd08fc5. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2247.139277] env[62875]: DEBUG nova.network.neutron [req-92d6322c-314b-45e4-adf2-deb76208b074 req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Updating instance_info_cache with network_info: [{"id": "edf1649b-cbe3-413d-b8ea-f4c89cd08fc5", "address": "fa:16:3e:8c:67:21", "network": {"id": "584b981c-d3be-4574-af88-35d1c69852c4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "095aa2361c8c47a1b4891b36019a4780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf1649b-cb", "ovs_interfaceid": "edf1649b-cbe3-413d-b8ea-f4c89cd08fc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2247.323575] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180498, 'name': Rename_Task, 'duration_secs': 0.189435} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.323942] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2247.324153] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfe3af8b-6521-4eaa-8eec-4af1dce637db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.331524] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2247.331524] env[62875]: value = "task-2180502" [ 2247.331524] env[62875]: _type = "Task" [ 2247.331524] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.335091] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "refresh_cache-3458fc2b-a241-4492-9b65-f89b955b8c0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.335239] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "refresh_cache-3458fc2b-a241-4492-9b65-f89b955b8c0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.335420] env[62875]: DEBUG nova.network.neutron [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2247.344412] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.350323] env[62875]: DEBUG oslo_vmware.api [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180499, 'name': PowerOffVM_Task, 'duration_secs': 0.27012} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.350566] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2247.350743] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2247.350980] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6060830-7b7a-44f4-91a9-fb2f4a217be0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.437391] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180500, 'name': ReconfigVM_Task, 'duration_secs': 0.505534} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.438124] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 9b3628be-b8a3-4105-bc84-088dede23aaf/9b3628be-b8a3-4105-bc84-088dede23aaf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2247.438811] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d8aa614-b993-4f62-a782-6bb239b78282 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.443341] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180501, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.447945] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2247.447945] env[62875]: value = "task-2180504" [ 2247.447945] env[62875]: _type = "Task" [ 2247.447945] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.456646] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180504, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.641718] env[62875]: DEBUG oslo_concurrency.lockutils [req-92d6322c-314b-45e4-adf2-deb76208b074 req-18353c2b-a009-4981-b807-42bf05ddd1d9 service nova] Releasing lock "refresh_cache-d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2247.704702] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2247.705435] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2247.705435] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Deleting the datastore file [datastore1] f629aa16-0442-4659-9a9c-30f10136ae84 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2247.705734] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a8d7219-881f-4365-a4e9-8dfa6bd5a503 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.712639] env[62875]: DEBUG oslo_vmware.api [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for the task: (returnval){ [ 2247.712639] env[62875]: value = "task-2180505" [ 2247.712639] env[62875]: _type = "Task" [ 2247.712639] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.724414] env[62875]: DEBUG oslo_vmware.api [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180505, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.781853] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "f3297565-541b-4a8f-a753-419b6e953ff0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.781853] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.842218] env[62875]: DEBUG oslo_vmware.api [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180502, 'name': PowerOnVM_Task, 'duration_secs': 0.460829} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.842460] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2247.842674] env[62875]: INFO nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Took 8.09 seconds to spawn the instance on the hypervisor. [ 2247.842853] env[62875]: DEBUG nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2247.843922] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff8c176-4747-4b88-84a7-2d8bb8ed75e1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.881197] env[62875]: DEBUG nova.network.neutron [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2247.941097] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180501, 'name': CreateVM_Task, 'duration_secs': 0.519306} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.941292] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2247.941995] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.942251] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.942587] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2247.942865] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8860f80c-742f-4a83-8145-437d08129657 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.949669] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2247.949669] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521101aa-61ee-205f-c468-203649bf0bb7" [ 2247.949669] env[62875]: _type = "Task" [ 2247.949669] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.962426] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180504, 'name': Rename_Task, 'duration_secs': 0.131559} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.965271] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2247.965531] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521101aa-61ee-205f-c468-203649bf0bb7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.965729] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8c7b4d2-da19-4b0a-bc66-b13263d045c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.971487] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2247.971487] env[62875]: value = "task-2180506" [ 2247.971487] env[62875]: _type = "Task" [ 2247.971487] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.989096] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180506, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.033630] env[62875]: DEBUG nova.network.neutron [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Updating instance_info_cache with network_info: [{"id": "b827e10c-2333-47ac-b3df-d384edbf2261", "address": "fa:16:3e:8e:bd:88", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb827e10c-23", "ovs_interfaceid": "b827e10c-2333-47ac-b3df-d384edbf2261", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2248.222201] env[62875]: DEBUG oslo_vmware.api [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Task: {'id': task-2180505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306819} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.222363] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2248.222552] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2248.222729] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2248.222898] env[62875]: INFO nova.compute.manager [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Took 1.41 seconds to destroy the instance on the hypervisor. [ 2248.223194] env[62875]: DEBUG oslo.service.loopingcall [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2248.223415] env[62875]: DEBUG nova.compute.manager [-] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2248.223514] env[62875]: DEBUG nova.network.neutron [-] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2248.283985] env[62875]: DEBUG nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2248.364934] env[62875]: INFO nova.compute.manager [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Took 12.88 seconds to build instance. [ 2248.462871] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521101aa-61ee-205f-c468-203649bf0bb7, 'name': SearchDatastore_Task, 'duration_secs': 0.017699} completed successfully.
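The oslo.service.loopingcall record above ("Waiting for function ... _deallocate_network_with_retries to return.") shows the network-deallocation retry helper being driven by a looping call. A self-contained example of that mechanism, using FixedIntervalLoopingCall for illustration (the exact looping-call variant and the function body are stand-ins, not Nova's code):

    from oslo_service import loopingcall

    def _deallocate_network_with_retries():
        # One attempt; raising LoopingCallDone stops the loop and returns.
        print('deallocating network')
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    result = timer.start(interval=1.0).wait()  # blocks until LoopingCallDone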
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.464443] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2248.464676] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2248.464919] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.465067] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2248.465254] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2248.465725] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5997da55-d596-43e1-85e0-3952fd394687 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.476934] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2248.477131] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2248.478153] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-471f632b-a496-4653-a7b1-fd6be272392a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.483467] env[62875]: DEBUG oslo_vmware.api [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180506, 'name': PowerOnVM_Task, 'duration_secs': 0.457667} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.484016] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2248.484234] env[62875]: INFO nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Took 6.45 seconds to spawn the instance on the hypervisor. [ 2248.484402] env[62875]: DEBUG nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2248.485151] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64dd7c0-c4f5-4b06-88fd-c4701a25c8a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.488892] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2248.488892] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b92daf-a2b2-4071-c27c-5804101a7d9a" [ 2248.488892] env[62875]: _type = "Task" [ 2248.488892] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.500297] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b92daf-a2b2-4071-c27c-5804101a7d9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.536561] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "refresh_cache-3458fc2b-a241-4492-9b65-f89b955b8c0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2248.536561] env[62875]: DEBUG nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Instance network_info: |[{"id": "b827e10c-2333-47ac-b3df-d384edbf2261", "address": "fa:16:3e:8e:bd:88", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb827e10c-23", "ovs_interfaceid": "b827e10c-2333-47ac-b3df-d384edbf2261", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2248.536840] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:bd:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b827e10c-2333-47ac-b3df-d384edbf2261', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2248.544608] env[62875]: DEBUG oslo.service.loopingcall [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
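The "Instance VIF info" record above is the driver's translation of the Neutron network_info entry logged just before it: an NSX-backed port becomes an OpaqueNetwork reference keyed by the logical-switch ID, plus the MAC and a vmxnet3 model. A sketch of that mapping using only fields present in the logged dicts (the helper name is hypothetical):

    def vif_info_from_network_info(vif):
        # 'vif' is one entry of the network_info list as logged above.
        return {
            'network_name': vif['network']['bridge'],  # 'br-int'
            'mac_address': vif['address'],             # 'fa:16:3e:8e:bd:88'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }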
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2248.545059] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2248.545286] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10870934-3279-4708-b9bf-1d9b4913725e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.565636] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2248.565636] env[62875]: value = "task-2180507" [ 2248.565636] env[62875]: _type = "Task" [ 2248.565636] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.573286] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180507, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.758722] env[62875]: DEBUG nova.compute.manager [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Received event network-changed-b827e10c-2333-47ac-b3df-d384edbf2261 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2248.758722] env[62875]: DEBUG nova.compute.manager [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Refreshing instance network info cache due to event network-changed-b827e10c-2333-47ac-b3df-d384edbf2261. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2248.758961] env[62875]: DEBUG oslo_concurrency.lockutils [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] Acquiring lock "refresh_cache-3458fc2b-a241-4492-9b65-f89b955b8c0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.759080] env[62875]: DEBUG oslo_concurrency.lockutils [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] Acquired lock "refresh_cache-3458fc2b-a241-4492-9b65-f89b955b8c0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2248.759325] env[62875]: DEBUG nova.network.neutron [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Refreshing network info cache for port b827e10c-2333-47ac-b3df-d384edbf2261 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2248.811309] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.811575] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.813228] env[62875]: INFO nova.compute.claims [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2248.866845] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b3a9c5b2-878c-4c13-b220-c3f6c0a7e24d tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.397s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.966847] env[62875]: DEBUG nova.network.neutron [-] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2249.003215] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b92daf-a2b2-4071-c27c-5804101a7d9a, 'name': SearchDatastore_Task, 'duration_secs': 0.024477} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.005369] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98664739-1ab2-4fcc-86f5-709324d62631 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.007775] env[62875]: INFO nova.compute.manager [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Took 11.92 seconds to build instance. [ 2249.011873] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2249.011873] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a59bb4-d3a3-86bb-278b-3989012fe1ed" [ 2249.011873] env[62875]: _type = "Task" [ 2249.011873] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.019694] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a59bb4-d3a3-86bb-278b-3989012fe1ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.077407] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180507, 'name': CreateVM_Task, 'duration_secs': 0.344172} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.077588] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2249.078751] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.078751] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.078751] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2249.078976] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c375822e-5fbf-4dd3-9ce4-6309fa235272 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.083933] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2249.083933] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dab70b-a9e4-1f4e-7f9d-31bacbab176a" [ 2249.083933] env[62875]: _type = "Task" [ 2249.083933] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.091096] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dab70b-a9e4-1f4e-7f9d-31bacbab176a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.109595] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.110380] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.110380] env[62875]: DEBUG nova.compute.manager [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2249.110883] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4b68dd-2e8b-42a3-9216-e519a67e449a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.116893] env[62875]: DEBUG nova.compute.manager [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62875) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2249.117416] env[62875]: DEBUG nova.objects.instance [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lazy-loading 'flavor' on Instance uuid 36c909f2-5d06-4a3e-ace2-15d2e36b4a95 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2249.454457] env[62875]: DEBUG nova.network.neutron [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Updated VIF entry in instance network info cache for port b827e10c-2333-47ac-b3df-d384edbf2261.
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2249.454809] env[62875]: DEBUG nova.network.neutron [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Updating instance_info_cache with network_info: [{"id": "b827e10c-2333-47ac-b3df-d384edbf2261", "address": "fa:16:3e:8e:bd:88", "network": {"id": "0ed13604-b495-4533-ac72-260baf769762", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-746012549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7523e34b88d84ec1ae28221d8d1a3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb827e10c-23", "ovs_interfaceid": "b827e10c-2333-47ac-b3df-d384edbf2261", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2249.469665] env[62875]: INFO nova.compute.manager [-] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Took 1.25 seconds to deallocate network for instance. [ 2249.509959] env[62875]: DEBUG oslo_concurrency.lockutils [None req-70227cc7-fc31-4dd3-8313-b479dfbdb8c8 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.433s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2249.522264] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a59bb4-d3a3-86bb-278b-3989012fe1ed, 'name': SearchDatastore_Task, 'duration_secs': 0.019932} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.522535] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.522781] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf/d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2249.523039] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d20d26c9-26c2-4faa-835f-e85ac341119e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.529568] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2249.529568] env[62875]: value = "task-2180508" [ 2249.529568] env[62875]: _type = "Task" [ 2249.529568] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.536802] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.592146] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dab70b-a9e4-1f4e-7f9d-31bacbab176a, 'name': SearchDatastore_Task, 'duration_secs': 0.030326} completed successfully. 
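Task-2180508 above is the cache-to-instance copy: once the image vmdk exists under devstack-image-cache_base, each new instance gets a server-side CopyVirtualDisk_Task into its own folder (followed, below, by an ExtendVirtualDisk_Task up to the flavor's root size). A sketch of the copy call, with session and dc_ref assumed to be an established oslo.vmware session and a datacenter reference:

    def copy_cached_disk(session, dc_ref, source, dest):
        # e.g. source = cached vmdk path, dest = per-instance vmdk path,
        # both in '[datastore] folder/name.vmdk' form as logged above.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr, sourceName=source,
                                  sourceDatacenter=dc_ref, destName=dest,
                                  destDatacenter=dc_ref)
        session.wait_for_task(task)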
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.592433] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.592701] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2249.592936] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.593110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.593303] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2249.593536] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cad17b8-6cff-4832-bed5-8b139fb33f16 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.600464] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2249.600630] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2249.601302] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04a7b986-d6d8-487c-a2eb-5bfc94c16c81 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.606015] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2249.606015] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525c03be-10fc-8a31-a959-b2c25b7154c7" [ 2249.606015] env[62875]: _type = "Task" [ 2249.606015] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.614743] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525c03be-10fc-8a31-a959-b2c25b7154c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.957623] env[62875]: DEBUG oslo_concurrency.lockutils [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] Releasing lock "refresh_cache-3458fc2b-a241-4492-9b65-f89b955b8c0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.957892] env[62875]: DEBUG nova.compute.manager [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Received event network-vif-deleted-c6e61eaf-2f31-4315-887e-aeeb76726046 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2249.958081] env[62875]: INFO nova.compute.manager [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Neutron deleted interface c6e61eaf-2f31-4315-887e-aeeb76726046; detaching it from the instance and deleting it from the info cache [ 2249.958254] env[62875]: DEBUG nova.network.neutron [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2249.976238] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.977629] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb46caaf-054d-4d36-ab36-5b4bc4ef6aab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.989182] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cff3c52b-1f6d-4ac4-94c8-9fd485357102 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.024656] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b85e5a-157b-46ed-a38d-0e727134bf1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.035162] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b845bfd-205e-4533-9800-5d01e3ddb284 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.043739] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507902} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.051137] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf/d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2250.051327] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2250.051775] env[62875]: DEBUG nova.compute.provider_tree [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2250.053039] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01bb14bc-ccbd-40e3-b25b-bc0d857c726e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.059560] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2250.059560] env[62875]: value = "task-2180509" [ 2250.059560] env[62875]: _type = "Task" [ 2250.059560] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.068319] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180509, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.115334] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525c03be-10fc-8a31-a959-b2c25b7154c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008515} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.116091] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7ab4808-e875-46ce-b6fc-cb59cdf790d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.121020] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2250.121020] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b73fc9-1825-90ab-82ad-e34ee1cb1a04" [ 2250.121020] env[62875]: _type = "Task" [ 2250.121020] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.124499] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2250.124745] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e3f508c-38cd-492a-b2ac-3e22b9b31316 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.132853] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b73fc9-1825-90ab-82ad-e34ee1cb1a04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.134075] env[62875]: DEBUG oslo_vmware.api [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){ [ 2250.134075] env[62875]: value = "task-2180510" [ 2250.134075] env[62875]: _type = "Task" [ 2250.134075] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.140960] env[62875]: DEBUG oslo_vmware.api [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180510, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.461362] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5543ae69-9f54-48e1-a7bb-95720ca8556e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.471039] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746a002b-c04e-4306-a238-dce3355c6304 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.498634] env[62875]: DEBUG nova.compute.manager [req-d676ace6-a878-4c2c-b8cc-acf380d04a4d req-f7824657-12b6-41ba-b1db-ae792cb3c981 service nova] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Detach interface failed, port_id=c6e61eaf-2f31-4315-887e-aeeb76726046, reason: Instance f629aa16-0442-4659-9a9c-30f10136ae84 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2250.555880] env[62875]: DEBUG nova.scheduler.client.report [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2250.568607] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070873} completed successfully. 
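The inventory dict in the report-client record above is what the scheduler sees for this node; usable capacity per resource class is total * allocation_ratio - reserved. Reproducing that arithmetic on the logged numbers:

    inventory = {  # values copied from the record above
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = inv['total'] * inv['allocation_ratio'] - inv['reserved']
        print(rc, int(capacity))  # VCPU 192, MEMORY_MB 196078, DISK_GB 400

max_unit still caps any single allocation (16 VCPU, 65530 MB, 174 GB in this inventory).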
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.568899] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2250.569666] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca41de5-0be4-48c6-bd12-88b8937095df {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.592287] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf/d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2250.592743] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb8d2b81-9077-4739-9863-425e89c2d331 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.611406] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2250.611406] env[62875]: value = "task-2180511" [ 2250.611406] env[62875]: _type = "Task" [ 2250.611406] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.618884] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180511, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.629917] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b73fc9-1825-90ab-82ad-e34ee1cb1a04, 'name': SearchDatastore_Task, 'duration_secs': 0.012176} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.630152] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2250.630408] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3458fc2b-a241-4492-9b65-f89b955b8c0b/3458fc2b-a241-4492-9b65-f89b955b8c0b.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2250.630637] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-382c2329-4560-4b8b-945f-1dab526988ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.638394] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2250.638394] env[62875]: value = "task-2180512" [ 2250.638394] env[62875]: _type = "Task" [ 2250.638394] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.644061] env[62875]: DEBUG oslo_vmware.api [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180510, 'name': PowerOffVM_Task, 'duration_secs': 0.191458} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.644605] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2250.644817] env[62875]: DEBUG nova.compute.manager [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2250.645540] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9042d05-c603-4259-a806-ae57a8c3eccb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.650374] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180512, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.061086] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.061738] env[62875]: DEBUG nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2251.064744] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.089s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.064934] env[62875]: DEBUG nova.objects.instance [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lazy-loading 'resources' on Instance uuid f629aa16-0442-4659-9a9c-30f10136ae84 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2251.121656] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180511, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.149355] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180512, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466675} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.149673] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3458fc2b-a241-4492-9b65-f89b955b8c0b/3458fc2b-a241-4492-9b65-f89b955b8c0b.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2251.151920] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2251.151920] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7af2032-ee5f-4e7b-ba93-27d1d5c22f3a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.158111] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){ [ 2251.158111] env[62875]: value = "task-2180513" [ 2251.158111] env[62875]: _type = "Task" [ 2251.158111] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.158620] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4e2d1773-c160-4bf4-975d-1e6df924c2be tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.049s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.170123] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.567481] env[62875]: DEBUG nova.compute.utils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2251.571878] env[62875]: DEBUG nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Allocating IP information in the background.
[ 2251.572024] env[62875]: DEBUG nova.network.neutron [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2251.610407] env[62875]: DEBUG nova.policy [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52db0a44319f46939b47247136267ceb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5062c761ea34842a2f6179ae76f3465', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 2251.624210] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180511, 'name': ReconfigVM_Task, 'duration_secs': 0.534225} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2251.624509] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Reconfigured VM instance instance-00000056 to attach disk [datastore2] d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf/d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 2251.625111] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38d89ff6-d594-4dc2-be3d-f67c35f1882b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2251.632228] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){
[ 2251.632228] env[62875]: value = "task-2180514"
[ 2251.632228] env[62875]: _type = "Task"
[ 2251.632228] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2251.641686] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180514, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2251.667600] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065997} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2251.669957] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 2251.671123] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242538ad-e950-48c8-ab6d-7c34589fa4e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2251.693249] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 3458fc2b-a241-4492-9b65-f89b955b8c0b/3458fc2b-a241-4492-9b65-f89b955b8c0b.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 2251.695953] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6a39efe-fb7e-410b-8294-4c75f469fc63 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2251.715208] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2251.715208] env[62875]: value = "task-2180515"
[ 2251.715208] env[62875]: _type = "Task"
[ 2251.715208] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2251.725082] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2251.737246] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9474742e-0a4b-487d-ba4d-a80c652b285b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2251.744288] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5c412a-ec94-4686-b1b7-e8ede7f6b146 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2251.775784] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0949fc06-d0f0-4e50-be68-4be4f240d472 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2251.783346] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec80469-bf0f-4a28-8d71-6c3cffc4f9e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2251.797359] env[62875]: DEBUG nova.compute.provider_tree [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2251.835220] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2251.835518] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2251.835750] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2251.835949] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2251.836162] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2251.838521] env[62875]: INFO nova.compute.manager [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Terminating instance
[ 2251.893599] env[62875]: DEBUG nova.network.neutron [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Successfully created port: ac461c5e-0fb6-4e69-adcc-14af13eb6dca {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2252.072524] env[62875]: DEBUG nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2252.142184] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180514, 'name': Rename_Task, 'duration_secs': 0.192634} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2252.142486] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2252.142721] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71464a28-598a-407c-9ede-28aae520e3bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2252.149392] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){
[ 2252.149392] env[62875]: value = "task-2180516"
[ 2252.149392] env[62875]: _type = "Task"
[ 2252.149392] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2252.157925] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2252.225199] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180515, 'name': ReconfigVM_Task, 'duration_secs': 0.317659} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2252.225494] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 3458fc2b-a241-4492-9b65-f89b955b8c0b/3458fc2b-a241-4492-9b65-f89b955b8c0b.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 2252.226310] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfb93262-dc6e-4f6e-a4e4-0a824db1d9ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2252.232584] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2252.232584] env[62875]: value = "task-2180517"
[ 2252.232584] env[62875]: _type = "Task"
[ 2252.232584] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2252.240696] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180517, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2252.300506] env[62875]: DEBUG nova.scheduler.client.report [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2252.341582] env[62875]: DEBUG nova.compute.manager [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2252.341811] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2252.342760] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b99cf0-2ca6-4594-a1fd-e3027a54a25c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2252.351950] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2252.352202] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e34c11cf-95d3-4fa4-99ae-b53ceadc292e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2252.659344] env[62875]: DEBUG oslo_vmware.api [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180516, 'name': PowerOnVM_Task, 'duration_secs': 0.446925} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2252.659672] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2252.659806] env[62875]: INFO nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Took 8.38 seconds to spawn the instance on the hypervisor.
[ 2252.659952] env[62875]: DEBUG nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2252.660823] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a472276-c685-468a-8c98-c0617363f080 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2252.741815] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180517, 'name': Rename_Task, 'duration_secs': 0.143425} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2252.742179] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2252.742343] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f8ba2a4-5a14-486d-8074-217ab92f354a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2252.748680] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2252.748680] env[62875]: value = "task-2180519"
[ 2252.748680] env[62875]: _type = "Task"
[ 2252.748680] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2252.755843] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180519, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2252.806270] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.741s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2252.823877] env[62875]: INFO nova.scheduler.client.report [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Deleted allocations for instance f629aa16-0442-4659-9a9c-30f10136ae84
[ 2253.081350] env[62875]: DEBUG nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2253.106071] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2253.106330] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2253.106489] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2253.106670] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2253.106815] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2253.106963] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2253.107189] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2253.107354] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2253.107521] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2253.107685] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2253.107857] env[62875]: DEBUG nova.virt.hardware [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2253.108719] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2407b5d3-4709-4cae-9ff8-258b2ef41ae6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2253.116682] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1676ea-d3f6-469d-9798-accac6362e7c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2253.178853] env[62875]: INFO nova.compute.manager [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Took 15.91 seconds to build instance.
[ 2253.258289] env[62875]: DEBUG oslo_vmware.api [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180519, 'name': PowerOnVM_Task, 'duration_secs': 0.427226} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2253.258791] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2253.259011] env[62875]: INFO nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Took 6.73 seconds to spawn the instance on the hypervisor.
[ 2253.259230] env[62875]: DEBUG nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2253.259989] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4214d0-13a7-486e-9f5a-41dce49f741a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2253.331535] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e5ada502-d178-4a5a-96e4-b84cc641fd07 tempest-ServerAddressesTestJSON-242824213 tempest-ServerAddressesTestJSON-242824213-project-member] Lock "f629aa16-0442-4659-9a9c-30f10136ae84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.026s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2253.681160] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5efbbc7f-4352-44a0-a354-b233f0f2562c tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.415s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2253.777807] env[62875]: INFO nova.compute.manager [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Took 15.32 seconds to build instance.
[ 2254.279980] env[62875]: DEBUG oslo_concurrency.lockutils [None req-45d8096a-d4bc-4ad6-b57e-4ea4e64618f2 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.833s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2254.870120] env[62875]: DEBUG nova.compute.manager [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2254.871048] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a86594-df3a-4680-8877-0151e1dca73e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2255.129831] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "3458fc2b-a241-4492-9b65-f89b955b8c0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2255.132259] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2255.132259] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "3458fc2b-a241-4492-9b65-f89b955b8c0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2255.132259] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2255.132259] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2255.134061] env[62875]: INFO nova.compute.manager [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Terminating instance
[ 2255.382132] env[62875]: INFO nova.compute.manager [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] instance snapshotting
[ 2255.384955] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68135d63-ab0c-4097-a575-6d86cfd660d3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2255.404569] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0fe0a4-7027-4e87-bd24-c2bc1297b983 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2255.639906] env[62875]: DEBUG nova.compute.manager [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2255.640121] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2255.641068] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a8b866-b665-456a-b389-d6da2d0568fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2255.648803] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 2255.649054] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96bd7e19-c25e-4ebe-a570-f81f16562041 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2255.654697] env[62875]: DEBUG oslo_vmware.api [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2255.654697] env[62875]: value = "task-2180520"
[ 2255.654697] env[62875]: _type = "Task"
[ 2255.654697] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2255.662649] env[62875]: DEBUG oslo_vmware.api [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180520, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2255.914950] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}}
[ 2255.915311] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-91478ea5-0d61-47da-86fe-467f86727a43 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2255.922231] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){
[ 2255.922231] env[62875]: value = "task-2180521"
[ 2255.922231] env[62875]: _type = "Task"
[ 2255.922231] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2255.930713] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180521, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2256.166380] env[62875]: DEBUG oslo_vmware.api [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180520, 'name': PowerOffVM_Task, 'duration_secs': 0.185035} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2256.166650] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2256.166817] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2256.167078] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c87bd4d0-1e23-451f-bdd0-398fc9e93c01 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2256.433259] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180521, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2256.933416] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180521, 'name': CreateSnapshot_Task, 'duration_secs': 0.646679} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2256.933719] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}}
[ 2256.934429] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafc00be-a753-49f9-a952-2760cc036ffc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2257.452270] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}}
[ 2257.452583] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1f68e72d-b828-41fc-8bd9-8ecc5a18d7cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2257.462156] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){
[ 2257.462156] env[62875]: value = "task-2180523"
[ 2257.462156] env[62875]: _type = "Task"
[ 2257.462156] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2257.470469] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180523, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2257.972375] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180523, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2258.473491] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180523, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2258.974475] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180523, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2259.337693] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2259.337924] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2259.338138] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleting the datastore file [datastore2] 36c909f2-5d06-4a3e-ace2-15d2e36b4a95 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2259.339295] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d78c4ad8-852d-4ee7-8043-769b74fbf9b5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2259.341158] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2259.341349] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2259.341521] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleting the datastore file [datastore2] 3458fc2b-a241-4492-9b65-f89b955b8c0b {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2259.341737] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6942556-65f8-4687-a90d-5c7184430b4e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2259.347452] env[62875]: DEBUG oslo_vmware.api [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for the task: (returnval){
[ 2259.347452] env[62875]: value = "task-2180524"
[ 2259.347452] env[62875]: _type = "Task"
[ 2259.347452] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2259.348686] env[62875]: DEBUG oslo_vmware.api [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for the task: (returnval){
[ 2259.348686] env[62875]: value = "task-2180525"
[ 2259.348686] env[62875]: _type = "Task"
[ 2259.348686] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2259.359133] env[62875]: DEBUG oslo_vmware.api [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2259.362147] env[62875]: DEBUG oslo_vmware.api [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2259.474980] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180523, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2259.863232] env[62875]: DEBUG oslo_vmware.api [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Task: {'id': task-2180524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171283} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2259.863506] env[62875]: DEBUG oslo_vmware.api [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Task: {'id': task-2180525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127302} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2259.863729] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2259.863910] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2259.864106] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2259.864287] env[62875]: INFO nova.compute.manager [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Took 7.52 seconds to destroy the instance on the hypervisor.
[ 2259.864523] env[62875]: DEBUG oslo.service.loopingcall [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2259.864740] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2259.864909] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2259.865092] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2259.865258] env[62875]: INFO nova.compute.manager [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Took 4.23 seconds to destroy the instance on the hypervisor.
[ 2259.865466] env[62875]: DEBUG oslo.service.loopingcall [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2259.865648] env[62875]: DEBUG nova.compute.manager [-] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2259.865745] env[62875]: DEBUG nova.network.neutron [-] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2259.867261] env[62875]: DEBUG nova.compute.manager [-] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2259.867364] env[62875]: DEBUG nova.network.neutron [-] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2259.975628] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180523, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2260.391832] env[62875]: DEBUG nova.compute.manager [req-9ab94c70-29f3-4efd-8011-050666ec1842 req-66135bfb-d97d-42d9-8e96-745bad8e0e31 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Received event network-vif-deleted-0997f285-a150-4272-8178-5e00f47156d5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2260.391832] env[62875]: INFO nova.compute.manager [req-9ab94c70-29f3-4efd-8011-050666ec1842 req-66135bfb-d97d-42d9-8e96-745bad8e0e31 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Neutron deleted interface 0997f285-a150-4272-8178-5e00f47156d5; detaching it from the instance and deleting it from the info cache
[ 2260.391832] env[62875]: DEBUG nova.network.neutron [req-9ab94c70-29f3-4efd-8011-050666ec1842 req-66135bfb-d97d-42d9-8e96-745bad8e0e31 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2260.476676] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180523, 'name': CloneVM_Task, 'duration_secs': 2.799267} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2260.476877] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Created linked-clone VM from snapshot
[ 2260.477630] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ad77ba-bb15-493b-88b5-c3680648d3f3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2260.484947] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Uploading image 28a2c66e-6b03-42d8-b982-691d504d196e {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}}
[ 2260.506321] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Creating HttpNfcLease lease for exporting VM: (result){
[ 2260.506321] env[62875]: value = "vm-444985"
[ 2260.506321] env[62875]: _type = "VirtualMachine"
[ 2260.506321] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}}
[ 2260.506579] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c872aa06-957a-4573-82f5-68c730394b2b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2260.513383] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lease: (returnval){
[ 2260.513383] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522024b3-134a-5b48-49c1-3c4be2ff2e1c"
[ 2260.513383] env[62875]: _type = "HttpNfcLease"
[ 2260.513383] env[62875]: } obtained for exporting VM: (result){
[ 2260.513383] env[62875]: value = "vm-444985"
[ 2260.513383] env[62875]: _type = "VirtualMachine"
[ 2260.513383] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}}
[ 2260.513726] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the lease: (returnval){
[ 2260.513726] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522024b3-134a-5b48-49c1-3c4be2ff2e1c"
[ 2260.513726] env[62875]: _type = "HttpNfcLease"
[ 2260.513726] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}}
[ 2260.519836] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 2260.519836] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522024b3-134a-5b48-49c1-3c4be2ff2e1c"
[ 2260.519836] env[62875]: _type = "HttpNfcLease"
[ 2260.519836] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}}
[ 2260.794326] env[62875]: DEBUG nova.compute.manager [req-93e800ce-47e5-46a8-b5da-f80a2534ddef req-2ccef849-5c4c-419d-8542-e3f4bbfeb99d service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Received event network-vif-plugged-ac461c5e-0fb6-4e69-adcc-14af13eb6dca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2260.794615] env[62875]: DEBUG oslo_concurrency.lockutils [req-93e800ce-47e5-46a8-b5da-f80a2534ddef req-2ccef849-5c4c-419d-8542-e3f4bbfeb99d service nova] Acquiring lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2260.794798] env[62875]: DEBUG oslo_concurrency.lockutils [req-93e800ce-47e5-46a8-b5da-f80a2534ddef req-2ccef849-5c4c-419d-8542-e3f4bbfeb99d service nova] Lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2260.794971] env[62875]: DEBUG oslo_concurrency.lockutils [req-93e800ce-47e5-46a8-b5da-f80a2534ddef req-2ccef849-5c4c-419d-8542-e3f4bbfeb99d service nova] Lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2260.795426] env[62875]: DEBUG nova.compute.manager [req-93e800ce-47e5-46a8-b5da-f80a2534ddef req-2ccef849-5c4c-419d-8542-e3f4bbfeb99d service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] No waiting events found dispatching network-vif-plugged-ac461c5e-0fb6-4e69-adcc-14af13eb6dca {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2260.795654] env[62875]: WARNING nova.compute.manager [req-93e800ce-47e5-46a8-b5da-f80a2534ddef req-2ccef849-5c4c-419d-8542-e3f4bbfeb99d service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Received unexpected event network-vif-plugged-ac461c5e-0fb6-4e69-adcc-14af13eb6dca for instance with vm_state building and task_state spawning.
[ 2260.868907] env[62875]: DEBUG nova.network.neutron [-] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2260.870161] env[62875]: DEBUG nova.network.neutron [-] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2260.886690] env[62875]: DEBUG nova.network.neutron [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Successfully updated port: ac461c5e-0fb6-4e69-adcc-14af13eb6dca {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2260.893406] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-231971c1-b842-44ba-bdbd-0a88124e654f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2260.903327] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b431ec8-fbd4-4e94-80c4-dd43d4899ef9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2260.931351] env[62875]: DEBUG nova.compute.manager [req-9ab94c70-29f3-4efd-8011-050666ec1842 req-66135bfb-d97d-42d9-8e96-745bad8e0e31 service nova] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Detach interface failed, port_id=0997f285-a150-4272-8178-5e00f47156d5, reason: Instance 36c909f2-5d06-4a3e-ace2-15d2e36b4a95 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2261.022098] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 2261.022098] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522024b3-134a-5b48-49c1-3c4be2ff2e1c"
[ 2261.022098] env[62875]: _type = "HttpNfcLease"
[ 2261.022098] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}}
[ 2261.022545] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Invoking VIM API for reading info of lease: (returnval){
[ 2261.022545] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522024b3-134a-5b48-49c1-3c4be2ff2e1c"
[ 2261.022545] env[62875]: _type = "HttpNfcLease"
[ 2261.022545] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}}
[ 2261.023174] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98793d6-9896-4ac1-91bb-ba1a9fe644be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2261.030472] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bc7e0f-b19f-88a3-abd1-93970cefb3cd/disk-0.vmdk from lease info.
{{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2261.030649] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bc7e0f-b19f-88a3-abd1-93970cefb3cd/disk-0.vmdk for reading. {{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2261.185444] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-740ce262-9c65-4f79-ae09-2f806a33fe58 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.373350] env[62875]: INFO nova.compute.manager [-] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Took 1.51 seconds to deallocate network for instance. [ 2261.373690] env[62875]: INFO nova.compute.manager [-] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Took 1.51 seconds to deallocate network for instance. [ 2261.389991] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2261.390301] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2261.390512] env[62875]: DEBUG nova.network.neutron [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2261.883541] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.883777] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.884056] env[62875]: DEBUG nova.objects.instance [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lazy-loading 'resources' on Instance uuid 36c909f2-5d06-4a3e-ace2-15d2e36b4a95 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2261.885926] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 
tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.928504] env[62875]: DEBUG nova.network.neutron [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2262.283074] env[62875]: DEBUG nova.network.neutron [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance_info_cache with network_info: [{"id": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "address": "fa:16:3e:9a:24:e6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac461c5e-0f", "ovs_interfaceid": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2262.419941] env[62875]: DEBUG nova.compute.manager [req-48074a5b-e5da-4e55-b355-8c7f2341448b req-dff21d63-9ffa-422b-a998-51891834c817 service nova] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Received event network-vif-deleted-b827e10c-2333-47ac-b3df-d384edbf2261 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2262.533050] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be88ff77-6dde-4ab0-b083-bac1a3fc8f98 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.540892] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a80fde0-58fb-47ef-9e4b-8e8dbca95da6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.572723] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c5563e-5ce2-45b4-8eb1-84de958b0032 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.580323] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-aa7be155-b788-4281-a9b4-cff189fc229a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.593805] env[62875]: DEBUG nova.compute.provider_tree [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2262.785904] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2262.786267] env[62875]: DEBUG nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Instance network_info: |[{"id": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "address": "fa:16:3e:9a:24:e6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac461c5e-0f", "ovs_interfaceid": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2262.786717] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:24:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac461c5e-0fb6-4e69-adcc-14af13eb6dca', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2262.794646] env[62875]: DEBUG oslo.service.loopingcall [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2262.794870] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2262.795104] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a8761b6-127f-4599-976d-7c6a9bb010c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.815222] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2262.815222] env[62875]: value = "task-2180527" [ 2262.815222] env[62875]: _type = "Task" [ 2262.815222] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.820932] env[62875]: DEBUG nova.compute.manager [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Received event network-changed-ac461c5e-0fb6-4e69-adcc-14af13eb6dca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2262.821134] env[62875]: DEBUG nova.compute.manager [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Refreshing instance network info cache due to event network-changed-ac461c5e-0fb6-4e69-adcc-14af13eb6dca. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2262.821353] env[62875]: DEBUG oslo_concurrency.lockutils [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] Acquiring lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2262.821498] env[62875]: DEBUG oslo_concurrency.lockutils [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] Acquired lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2262.821656] env[62875]: DEBUG nova.network.neutron [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Refreshing network info cache for port ac461c5e-0fb6-4e69-adcc-14af13eb6dca {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2262.829440] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180527, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.096836] env[62875]: DEBUG nova.scheduler.client.report [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2263.327827] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180527, 'name': CreateVM_Task, 'duration_secs': 0.397698} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.328233] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2263.328862] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2263.329041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2263.329361] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2263.329619] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c60d52f-e2e9-409b-a864-7634ed5ce3d5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.334331] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2263.334331] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220e706-bbf1-ff2b-42dd-9a0552a28760" [ 2263.334331] env[62875]: _type = "Task" [ 2263.334331] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.342128] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220e706-bbf1-ff2b-42dd-9a0552a28760, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.522129] env[62875]: DEBUG nova.network.neutron [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updated VIF entry in instance network info cache for port ac461c5e-0fb6-4e69-adcc-14af13eb6dca. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2263.522962] env[62875]: DEBUG nova.network.neutron [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance_info_cache with network_info: [{"id": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "address": "fa:16:3e:9a:24:e6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac461c5e-0f", "ovs_interfaceid": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2263.604778] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2263.607755] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.721s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2263.608031] env[62875]: DEBUG nova.objects.instance [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lazy-loading 'resources' on Instance uuid 
3458fc2b-a241-4492-9b65-f89b955b8c0b {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2263.623714] env[62875]: INFO nova.scheduler.client.report [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Deleted allocations for instance 36c909f2-5d06-4a3e-ace2-15d2e36b4a95 [ 2263.845589] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5220e706-bbf1-ff2b-42dd-9a0552a28760, 'name': SearchDatastore_Task, 'duration_secs': 0.01093} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.845947] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2263.846210] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2263.846491] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2263.846639] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2263.846818] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2263.847122] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4b6c77d-e0b9-4510-bed7-1d1bfc975313 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.856828] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2263.857039] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2263.857793] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56a3251e-6b5f-415f-90fc-d19ad1e4084c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.863411] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2263.863411] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d27a16-dd35-0778-2f3e-becaed15ea2e" [ 2263.863411] env[62875]: _type = "Task" [ 2263.863411] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.871662] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d27a16-dd35-0778-2f3e-becaed15ea2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.026251] env[62875]: DEBUG oslo_concurrency.lockutils [req-a9a6df8c-37bd-4f87-bacc-8052656efaee req-6d2a37ae-a767-4aed-98d5-10fa9cc50886 service nova] Releasing lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2264.131486] env[62875]: DEBUG oslo_concurrency.lockutils [None req-48a12bc4-a51a-48c0-b8b4-d9889cbe09af tempest-ServersTestJSON-1595867321 tempest-ServersTestJSON-1595867321-project-member] Lock "36c909f2-5d06-4a3e-ace2-15d2e36b4a95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.296s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2264.242719] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee17557-c0bf-436c-9bed-e79b63af4c9f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.251188] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b65875b-cd07-4f65-b73d-3bc8bdff4157 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.283355] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8330b1b0-7134-4e0a-83dc-0dec51428ace {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.291189] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0ad05f-443b-49ab-83ee-e1c5a4e637f9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.304675] env[62875]: DEBUG nova.compute.provider_tree [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 
tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2264.374160] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d27a16-dd35-0778-2f3e-becaed15ea2e, 'name': SearchDatastore_Task, 'duration_secs': 0.009224} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.374948] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d6b0050-c956-4556-8273-3098ee154bd6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.380438] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2264.380438] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529282e1-75e6-e573-7562-6e558df166f7" [ 2264.380438] env[62875]: _type = "Task" [ 2264.380438] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.388439] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529282e1-75e6-e573-7562-6e558df166f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.808061] env[62875]: DEBUG nova.scheduler.client.report [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2264.892690] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529282e1-75e6-e573-7562-6e558df166f7, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.892989] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2264.893337] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] f3297565-541b-4a8f-a753-419b6e953ff0/f3297565-541b-4a8f-a753-419b6e953ff0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2264.893615] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd7c5913-1299-4b62-a724-9df59b6e91a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.900972] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2264.900972] env[62875]: value = "task-2180528" [ 2264.900972] env[62875]: _type = "Task" [ 2264.900972] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.908944] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180528, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.314031] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.707s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.340627] env[62875]: INFO nova.scheduler.client.report [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Deleted allocations for instance 3458fc2b-a241-4492-9b65-f89b955b8c0b [ 2265.410508] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180528, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479978} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.410829] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] f3297565-541b-4a8f-a753-419b6e953ff0/f3297565-541b-4a8f-a753-419b6e953ff0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2265.411018] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2265.411304] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dab3a872-57d3-47e7-a411-ba9889e82f1d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.417948] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2265.417948] env[62875]: value = "task-2180529" [ 2265.417948] env[62875]: _type = "Task" [ 2265.417948] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.425565] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180529, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.849255] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6c381238-aaee-41c8-80e8-ce4ecde24b16 tempest-ServerDiskConfigTestJSON-1091453023 tempest-ServerDiskConfigTestJSON-1091453023-project-member] Lock "3458fc2b-a241-4492-9b65-f89b955b8c0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.719s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.928050] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081919} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.928387] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2265.929284] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6769b2f2-9cac-4f2d-a151-fbe281b752c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.955247] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] f3297565-541b-4a8f-a753-419b6e953ff0/f3297565-541b-4a8f-a753-419b6e953ff0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2265.955610] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48e40f29-5aad-487c-a0b3-db5d27580f0c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.976260] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2265.976260] env[62875]: value = "task-2180530" [ 2265.976260] env[62875]: _type = "Task" [ 2265.976260] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.984962] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180530, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.486508] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180530, 'name': ReconfigVM_Task, 'duration_secs': 0.292113} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.486834] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Reconfigured VM instance instance-00000058 to attach disk [datastore1] f3297565-541b-4a8f-a753-419b6e953ff0/f3297565-541b-4a8f-a753-419b6e953ff0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2266.487515] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d5e1c06-7e9f-4775-8e18-52559066bef1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.494687] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2266.494687] env[62875]: value = "task-2180531" [ 2266.494687] env[62875]: _type = "Task" [ 2266.494687] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.503438] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180531, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.008935] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180531, 'name': Rename_Task, 'duration_secs': 0.132579} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.009773] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2267.010519] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba892dee-4bda-432d-8eb4-0d92479e24cf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.019622] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2267.019622] env[62875]: value = "task-2180532" [ 2267.019622] env[62875]: _type = "Task" [ 2267.019622] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.031039] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180532, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.048035] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.048035] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.532729] env[62875]: DEBUG oslo_vmware.api [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180532, 'name': PowerOnVM_Task, 'duration_secs': 0.506746} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.533886] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2267.534293] env[62875]: INFO nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Took 14.45 seconds to spawn the instance on the hypervisor. [ 2267.534742] env[62875]: DEBUG nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2267.535933] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84df697f-5ec9-4c2c-ba96-d04abb4d0dba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.551551] env[62875]: DEBUG nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2267.994641] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bc7e0f-b19f-88a3-abd1-93970cefb3cd/disk-0.vmdk. 
{{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2267.995643] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f522655c-c9b0-424f-8495-141e34b8ec37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.001932] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bc7e0f-b19f-88a3-abd1-93970cefb3cd/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2268.002355] env[62875]: ERROR oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bc7e0f-b19f-88a3-abd1-93970cefb3cd/disk-0.vmdk due to incomplete transfer. [ 2268.002355] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a203cac2-1454-4742-9b46-73373da108fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.009420] env[62875]: DEBUG oslo_vmware.rw_handles [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bc7e0f-b19f-88a3-abd1-93970cefb3cd/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2268.009613] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Uploaded image 28a2c66e-6b03-42d8-b982-691d504d196e to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2268.011821] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2268.012063] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ebdc83d2-ad2a-4714-bf51-3e16f6bd3c38 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.017735] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2268.017735] env[62875]: value = "task-2180533" [ 2268.017735] env[62875]: _type = "Task" [ 2268.017735] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.025293] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180533, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.060644] env[62875]: INFO nova.compute.manager [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Took 19.27 seconds to build instance. [ 2268.075967] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.076246] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.077832] env[62875]: INFO nova.compute.claims [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2268.527474] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180533, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.563278] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6050098d-e4d7-4a47-816e-b26e73a0c971 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.781s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.027631] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180533, 'name': Destroy_Task, 'duration_secs': 0.56615} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.027939] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Destroyed the VM [ 2269.028151] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2269.028400] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-781e015b-2ddf-4377-8ce3-b36d337ff823 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.034657] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2269.034657] env[62875]: value = "task-2180534" [ 2269.034657] env[62875]: _type = "Task" [ 2269.034657] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.042959] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180534, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.207971] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f465b0d7-3cb5-4b1e-9e88-b5b0f35fd0b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.215892] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1b1e7b-f20f-47bb-aaeb-e48416e01a58 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.249726] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97c4362-20cd-4030-9861-f222b7719f62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.259199] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf58b2d-4fe6-46a8-8d91-81a475cc5f18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.274274] env[62875]: DEBUG nova.compute.provider_tree [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2269.544915] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180534, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.783029] env[62875]: DEBUG nova.scheduler.client.report [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2270.049025] env[62875]: DEBUG oslo_vmware.api [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180534, 'name': RemoveSnapshot_Task, 'duration_secs': 0.962821} completed successfully. 
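
The inventory dict reported for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 determines the capacity Placement allocates against: per resource class, usable = (total - reserved) * allocation_ratio. Worked through for the values above:

    # Effective capacity implied by the reported inventory above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
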
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2270.049025] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2270.049025] env[62875]: INFO nova.compute.manager [None req-e6b109ec-4396-47a6-b2ad-b070ece43171 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Took 14.66 seconds to snapshot the instance on the hypervisor. [ 2270.143208] env[62875]: DEBUG nova.compute.manager [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Stashing vm_state: active {{(pid=62875) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2270.285956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.286583] env[62875]: DEBUG nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2270.662882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.663204] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.792346] env[62875]: DEBUG nova.compute.utils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2270.793912] env[62875]: DEBUG nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Not allocating networking since 'none' was specified. 
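
The "Using /dev/sd instead of None" line is get_next_device_name falling back to the /dev/sd prefix when no device name was requested, after which the next free letter is picked. A simplified sketch of that idea (not Nova's exact implementation, which also handles multi-letter suffixes and reserved names):

    # Simplified next-device-name sketch.
    import string

    def next_device_name(used, prefix='/dev/sd'):
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError('no free device names')

    print(next_device_name({'/dev/sda'}))  # /dev/sdb
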
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2271.169100] env[62875]: INFO nova.compute.claims [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2271.214893] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.215136] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.215301] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.215458] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.215599] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2271.298969] env[62875]: DEBUG nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Start building block device mappings for instance. 
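
The run_periodic_tasks lines are oslo.service iterating over methods registered with the periodic_task decorator; the _reclaim_queued_deletes entry even shows its early-exit guard firing. A declaration sketch (method names follow the log; wiring the manager into a service loop is omitted, and reclaim_instance_interval is a Nova-registered option):

    # Declaration sketch for the periodic tasks listed above.
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF  # reclaim_instance_interval is registered by Nova itself

    class ComputeManagerSketch(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _poll_unconfirmed_resizes(self, context):
            pass

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            # Mirrors "CONF.reclaim_instance_interval <= 0, skipping..."
            if CONF.reclaim_instance_interval <= 0:
                return
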
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2271.363137] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.363415] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.394831] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "4356506b-ff11-43bb-84be-35ea0fe90cb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.395088] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.676772] env[62875]: INFO nova.compute.resource_tracker [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating resource usage from migration 64ce0f6d-8e14-4f10-a806-91824902a197 [ 2271.834304] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd7077c-223b-4ef6-91f6-a19024697768 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.842460] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40195a7-c1df-4743-8700-13219a11d826 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.872528] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2271.878534] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97537fb-51ae-41f8-af2c-737a1a98ead8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.884020] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ffe4ea-756a-4eeb-ad63-04d63e117d63 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.897962] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2271.900500] env[62875]: DEBUG nova.compute.provider_tree [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2272.310122] env[62875]: DEBUG nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2272.335249] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2272.335520] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2272.335682] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2272.335860] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 
tempest-ServerShowV254Test-458485411-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2272.336011] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2272.336165] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2272.336376] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2272.336565] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2272.336734] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2272.336896] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2272.337079] env[62875]: DEBUG nova.virt.hardware [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2272.337936] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a46356-f3fe-4422-8656-2fe12694ac17 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.346073] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165871f2-0484-47b6-ae02-6197e54016b0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.360440] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2272.365389] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 
tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Creating folder: Project (ab49add3722147128a1717f09058b47d). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2272.365670] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8d0980b-5231-45a7-b765-dbe71db4fe59 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.375252] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Created folder: Project (ab49add3722147128a1717f09058b47d) in parent group-v444854. [ 2272.375444] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Creating folder: Instances. Parent ref: group-v444987. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2272.375668] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5110423c-f96f-40ff-b224-20464fd06536 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.384229] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Created folder: Instances in parent group-v444987. [ 2272.384407] env[62875]: DEBUG oslo.service.loopingcall [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
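
The nova.virt.hardware block above reduces to a small factorization problem: enumerate sockets*cores*threads combinations equal to the vCPU count, capped by the 65536 limits; with vcpus=1 only 1:1:1 survives, hence "Got 1 possible topologies". A sketch of that enumeration (not Nova's exact algorithm):

    # Enumerate CPU topologies whose product equals the vCPU count.
    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            rest = vcpus // s
            for c in range(1, min(rest, max_cores) + 1):
                if rest % c:
                    continue
                t = rest // c
                if t <= max_threads:
                    topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]
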
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2272.384589] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2272.384784] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7bcb4fb8-96fe-413b-8ef5-2f7371e59b54 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.398204] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2272.407209] env[62875]: DEBUG nova.scheduler.client.report [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2272.410342] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2272.410342] env[62875]: value = "task-2180537" [ 2272.410342] env[62875]: _type = "Task" [ 2272.410342] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.418860] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180537, 'name': CreateVM_Task} progress is 6%. 
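
"Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" is oslo.service's looping-call machinery: a function run under FixedIntervalLoopingCall signals completion by raising LoopingCallDone, and start().wait() hands back its retvalue. A toy sketch, with _create_vm standing in for the real function:

    # Looping-call completion sketch.
    from oslo_service import loopingcall

    def _create_vm():
        raise loopingcall.LoopingCallDone(retvalue='vm-ref')

    timer = loopingcall.FixedIntervalLoopingCall(_create_vm)
    result = timer.start(interval=0.5).wait()
    print(result)  # 'vm-ref'
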
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.420368] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2272.650714] env[62875]: DEBUG nova.compute.manager [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2272.651644] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c25e1f-ef16-4c47-84ec-e4fb570ba5df {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.706467] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.911992] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.248s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.912286] env[62875]: INFO nova.compute.manager [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Migrating [ 2272.919137] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.521s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2272.920764] env[62875]: INFO nova.compute.claims [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2272.941744] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180537, 'name': CreateVM_Task, 'duration_secs': 0.242731} completed successfully. 
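
Each "Invoking PropertyCollector.RetrievePropertiesEx" line corresponds to a property read such as the power-state check above; through oslo.vmware that is typically a single get_object_property call. A sketch, reusing the session and vm_ref placeholders from the earlier snippet:

    # Property-read sketch behind the RetrievePropertiesEx invocations.
    from oslo_vmware import vim_util

    power_state = session.invoke_api(
        vim_util, 'get_object_property',
        session.vim, vm_ref, 'runtime.powerState')
    print(power_state)  # e.g. 'poweredOn'
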
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.942176] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2272.942344] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2272.942605] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2272.943174] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2272.944063] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd942521-0763-4db2-86b7-f7edf84f05a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.948868] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2272.948868] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52192d89-689a-548c-9673-4fabe4d96099" [ 2272.948868] env[62875]: _type = "Task" [ 2272.948868] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.958268] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52192d89-689a-548c-9673-4fabe4d96099, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.162692] env[62875]: INFO nova.compute.manager [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] instance snapshotting [ 2273.165399] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a528e50-6935-4472-b004-55614901b13d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.185322] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2be2822-ce2c-48d4-8774-d3dfe4a21724 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.436811] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2273.437023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2273.437164] env[62875]: DEBUG nova.network.neutron [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2273.464027] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52192d89-689a-548c-9673-4fabe4d96099, 'name': SearchDatastore_Task, 'duration_secs': 0.008951} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.464027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2273.464027] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2273.464027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2273.464027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2273.464299] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2273.464367] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73c51913-8a9c-497c-ace2-af6d32f0f58e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.472845] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2273.473021] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Folder [datastore2] devstack-image-cache_base created. 
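
The "Creating directory ... Created directory" pair above is an idempotent MakeDirectory call against the datastore: a file-already-exists outcome is treated as success so racing workers can share the image cache folder. A sketch, assuming a session and datacenter moref as before:

    # Idempotent datastore-mkdir sketch (session/dc_ref assumed).
    from oslo_vmware import exceptions as vexc

    def mkdir_idempotent(session, ds_path, dc_ref):
        file_manager = session.vim.service_content.fileManager
        try:
            session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                               name=ds_path, datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass  # another worker created it first; that's fine

    mkdir_idempotent(session, '[datastore2] devstack-image-cache_base', dc_ref)
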
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2273.473743] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c67b898-caf9-43e1-86e8-7fa1302cc6e1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.478662] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2273.478662] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521a16a1-7eac-033e-dfc8-165b39160553" [ 2273.478662] env[62875]: _type = "Task" [ 2273.478662] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.486055] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521a16a1-7eac-033e-dfc8-165b39160553, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.695990] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2273.696282] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bc972159-d0fa-4f28-a04f-15f1688fe2e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.703863] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2273.703863] env[62875]: value = "task-2180538" [ 2273.703863] env[62875]: _type = "Task" [ 2273.703863] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.712081] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180538, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.989150] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521a16a1-7eac-033e-dfc8-165b39160553, 'name': SearchDatastore_Task, 'duration_secs': 0.012888} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.992438] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cbd2a66-9b11-47b9-9729-36c9a106c7f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.997893] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2273.997893] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52abeb82-9bb9-283a-0b9a-f45dcded2f5a" [ 2273.997893] env[62875]: _type = "Task" [ 2273.997893] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.009391] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52abeb82-9bb9-283a-0b9a-f45dcded2f5a, 'name': SearchDatastore_Task, 'duration_secs': 0.009227} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.009709] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2274.009963] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2274.010246] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8edbc225-84a7-4e4a-a730-9064bbc118a6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.015930] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2274.015930] env[62875]: value = "task-2180539" [ 2274.015930] env[62875]: _type = "Task" [ 2274.015930] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.023439] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180539, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.109082] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8bc9e0-5b9c-482c-9e6b-5ccc8fca4ab4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.116940] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69acdbf-4ae3-49a7-ac9e-120af0a80078 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.151331] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ede431a-d303-4991-9ad4-6d86d1aad8a4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.159601] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3559848e-d78b-4378-841d-5959cc3e19a7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.173600] env[62875]: DEBUG nova.compute.provider_tree [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2274.191407] env[62875]: DEBUG nova.network.neutron [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance_info_cache with network_info: [{"id": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "address": "fa:16:3e:9a:24:e6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac461c5e-0f", "ovs_interfaceid": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2274.213969] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180538, 'name': CreateSnapshot_Task, 'duration_secs': 0.458375} completed successfully. 
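
The instance_info_cache entry above is a JSON-style list of VIF dicts. Pulling the fixed IPs out of that structure, trimmed here to the keys actually used:

    # Extracting fixed IPs from a network_info cache entry like the one above.
    network_info = [{
        "id": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca",
        "address": "fa:16:3e:9a:24:e6",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.5",
                                          "type": "fixed"}]}]},
    }]

    fixed_ips = [ip["address"]
                 for vif in network_info
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(fixed_ips)  # ['192.168.128.5']
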
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.214271] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2274.215077] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf28e80-6d14-494a-b678-dfdf38137bd6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.525251] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180539, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484416} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.525517] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2274.525731] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2274.525972] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e1d5ff4-3c7f-4014-a027-86640aafd76a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.532459] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2274.532459] env[62875]: value = "task-2180540" [ 2274.532459] env[62875]: _type = "Task" [ 2274.532459] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.539836] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180540, 'name': ExtendVirtualDisk_Task} progress is 0%. 
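
"Extending root virtual disk to 1048576" is the flavor's root_gb=1 expressed in KB, the unit vSphere's ExtendVirtualDisk_Task (newCapacityKb) expects:

    # Unit conversion behind the extend call above.
    root_gb = 1
    new_capacity_kb = root_gb * 1024 * 1024
    print(new_capacity_kb)  # 1048576
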
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.679329] env[62875]: DEBUG nova.scheduler.client.report [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2274.694504] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2274.706069] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2274.733756] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2274.733930] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fcee8f0a-d1b2-4d2c-8670-2c4464d8eaa2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.742852] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2274.742852] env[62875]: value = "task-2180541" [ 2274.742852] env[62875]: _type = "Task" [ 2274.742852] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.750940] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.042463] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180540, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063891} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.042778] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2275.043573] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ace80df-ffdc-49fb-9251-62551a83d501 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.064850] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2275.065159] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acf2b9fa-5d6d-45c1-ae46-31bf67da0363 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.085608] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2275.085608] env[62875]: value = "task-2180542" [ 2275.085608] env[62875]: _type = "Task" [ 2275.085608] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.095959] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180542, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.184275] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.265s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2275.184854] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2275.187461] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.767s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2275.188883] env[62875]: INFO nova.compute.claims [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2275.253438] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.596754] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180542, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.694225] env[62875]: DEBUG nova.compute.utils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2275.697241] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2275.697414] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2275.705958] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.738920] env[62875]: DEBUG nova.policy [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec72ce9630d1427a93aeed584e2c989e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4f66deaad8e44f8b352e02a74ca6613', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2275.753354] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.009141] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Successfully created port: 56a3ac28-628c-4b11-a501-4f0c95923e8a {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2276.097121] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180542, 'name': ReconfigVM_Task, 'duration_secs': 0.64455} completed successfully. 
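[annotation] The `nova.policy` entry shows a non-fatal authorization check: the member/reader credentials fail `network:attach_external_network`, so the build simply proceeds without an external network. A minimal sketch of that kind of rule evaluation; the rule table and its admin-only semantics are assumptions for illustration, not oslo.policy's API:

```python
# Hypothetical rule table; a real deployment loads rules from policy files.
POLICY_RULES = {
    "network:attach_external_network": lambda creds: creds.get("is_admin", False),
}


def authorize(rule, creds):
    """Return True if the credentials satisfy the named rule."""
    check = POLICY_RULES.get(rule)
    ok = bool(check and check(creds))
    if not ok:
        # Non-fatal here: the failure is logged and the external network
        # is simply not attached, as in the DEBUG line above.
        print(f"Policy check for {rule} failed with credentials {creds}")
    return ok


creds = {"is_admin": False, "roles": ["member", "reader"]}
authorize("network:attach_external_network", creds)
```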
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.097419] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2276.098009] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2e73daa-57bd-4f9c-af19-8f98f909c492 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.104081] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2276.104081] env[62875]: value = "task-2180543" [ 2276.104081] env[62875]: _type = "Task" [ 2276.104081] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.112540] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180543, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.198431] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2276.211832] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5819de-7579-4e3d-90ef-70f3a37c7f76 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.234076] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance 'f3297565-541b-4a8f-a753-419b6e953ff0' progress to 0 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2276.255614] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.358264] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5990f640-cb26-47b6-b34a-c98b15085959 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.365284] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52f3364-34cd-447f-b1ef-de3d5115afff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.394383] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89804d6a-51e5-4e1a-bc4f-709433fa4f11 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.401454] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ee1665-75f8-4fe4-a542-3962753e09ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.414585] env[62875]: DEBUG nova.compute.provider_tree [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2276.615850] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180543, 'name': Rename_Task, 'duration_secs': 0.131524} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.616208] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2276.616516] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2a48452-9500-4335-98eb-9640318abaa0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.622760] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2276.622760] env[62875]: value = "task-2180544" [ 2276.622760] env[62875]: _type = "Task" [ 2276.622760] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.630027] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180544, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.740306] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2276.741015] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e15d079-e3f8-4cba-94f3-f5e6a2475769 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.750823] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2276.750823] env[62875]: value = "task-2180545" [ 2276.750823] env[62875]: _type = "Task" [ 2276.750823] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.759199] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.764308] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180545, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.918118] env[62875]: DEBUG nova.scheduler.client.report [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2277.137030] env[62875]: DEBUG oslo_vmware.api [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180544, 'name': PowerOnVM_Task, 'duration_secs': 0.405362} completed successfully. 
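[annotation] The scheduler report line above skips a placement update because the computed inventory matches what was last pushed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02. A minimal sketch of that guard, reusing the exact inventory shape from the log; the `_cached` store stands in for the provider tree and is hypothetical:

```python
_cached = {}  # provider uuid -> last inventory pushed (hypothetical cache)


def set_inventory_for_provider(provider_uuid, inventory):
    """Push inventory only when it differs from the cached copy."""
    if _cached.get(provider_uuid) == inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    _cached[provider_uuid] = inventory
    # ...a real client would PUT the inventory to placement here...
    return True


inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174,
                'step_size': 1, 'allocation_ratio': 1.0},
}
set_inventory_for_provider('2d6e5fad-ed55-4f17-b68d-be9dae183a02', inventory)
set_inventory_for_provider('2d6e5fad-ed55-4f17-b68d-be9dae183a02', inventory)
```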
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.137496] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2277.137648] env[62875]: INFO nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Took 4.83 seconds to spawn the instance on the hypervisor. [ 2277.137898] env[62875]: DEBUG nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2277.139012] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c076371c-18df-42db-86b1-251d620ebd92 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.211514] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2277.232557] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2277.232806] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2277.232960] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2277.233158] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e 
tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2277.233303] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2277.233457] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2277.233696] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2277.233860] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2277.234037] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2277.234205] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2277.234379] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2277.235264] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18ad641-8b81-4319-83ba-9194077c2ac8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.243053] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169e618c-2027-429f-9ca3-88af2e1a4f86 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.264469] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. 
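[annotation] The `nova.virt.hardware` walk above ("Build topologies for 1 vcpu(s) 1:1:1" yielding the single `VirtCPUTopology(cores=1,sockets=1,threads=1)`) enumerates socket/core/thread factorizations of the vCPU count under the 65536 limits. A simplified sketch of that enumeration, not Nova's actual `_get_possible_cpu_topologies`:

```python
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Return every (sockets, cores, threads) whose product equals vcpus."""
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(vcpus // s, max_cores) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                found.append(VirtCPUTopology(s, c, t))
    return found


# For the 1-vCPU flavors in this run there is exactly one topology:
assert possible_cpu_topologies(1) == [VirtCPUTopology(1, 1, 1)]
```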
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.268837] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180545, 'name': PowerOffVM_Task, 'duration_secs': 0.21711} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.269079] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2277.269262] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance 'f3297565-541b-4a8f-a753-419b6e953ff0' progress to 17 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2277.423537] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.424081] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2277.658294] env[62875]: INFO nova.compute.manager [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Took 9.60 seconds to build instance. [ 2277.756148] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. 
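[annotation] The `progress to 0 ... 17 ... 33 ... 50 ... 67` updates for instance f3297565 mark fixed milestones in the resize flow rather than a measured percentage. A sketch of that convention; the percentages match the updates seen in this log, but the milestone names are my own labels, not Nova's:

```python
# Hypothetical milestone table (names invented for illustration).
RESIZE_PROGRESS = [
    ('power_off_started', 0),
    ('powered_off', 17),
    ('root_disk_detached', 33),
    ('disk_reattached', 50),
    ('disk_migrated', 67),
]


def update_instance_progress(instance, step):
    """Record how far a resize has advanced, as a fixed percentage."""
    pct = dict(RESIZE_PROGRESS)[step]
    instance['progress'] = pct
    print(f"Updating instance '{instance['uuid']}' progress to {pct}")


vm = {'uuid': 'f3297565-541b-4a8f-a753-419b6e953ff0', 'progress': 0}
update_instance_progress(vm, 'powered_off')
```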
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.775338] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2277.775706] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2277.775995] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2277.776317] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2277.776618] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2277.776863] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2277.777254] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2277.777540] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2277.777795] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible 
topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2277.778093] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2277.778307] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2277.786590] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0abaa2ff-39e6-49bb-b9e1-2c12142dc11e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.811061] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2277.811061] env[62875]: value = "task-2180547" [ 2277.811061] env[62875]: _type = "Task" [ 2277.811061] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2277.822381] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180547, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.929251] env[62875]: DEBUG nova.compute.utils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2277.930673] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Allocating IP information in the background. 
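[annotation] "Allocating IP information in the background" means Neutron port creation runs concurrently with the block-device-mapping work, and the spawn joins on the result only when it needs the NICs. A sketch of that shape with `concurrent.futures`; Nova itself uses eventlet green threads, so this is an analogy under stated assumptions, not its code:

```python
from concurrent.futures import ThreadPoolExecutor


def allocate_for_instance(instance_uuid):
    """Stand-in for the Neutron port-creation round trips."""
    port_id = '56a3ac28-628c-4b11-a501-4f0c95923e8a'  # example id from this log
    print(f"[instance: {instance_uuid}] Successfully created port: {port_id}")
    return [{'port_id': port_id}]


def build_block_device_mappings(instance_uuid):
    print(f"[instance: {instance_uuid}] Start building block device mappings")


with ThreadPoolExecutor() as pool:
    uuid = '75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3'
    nw_future = pool.submit(allocate_for_instance, uuid)  # background work
    build_block_device_mappings(uuid)   # proceeds without waiting
    network_info = nw_future.result()   # spawn blocks here when NICs are needed
```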
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2277.930880] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2277.979942] env[62875]: DEBUG nova.policy [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec72ce9630d1427a93aeed584e2c989e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4f66deaad8e44f8b352e02a74ca6613', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2278.160737] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c6e2e13e-739b-43f1-b49d-ebd2125c4f65 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.113s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2278.257424] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.320671] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180547, 'name': ReconfigVM_Task, 'duration_secs': 0.206186} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2278.321043] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance 'f3297565-541b-4a8f-a753-419b6e953ff0' progress to 33 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2278.329806] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Successfully created port: 84ebcc4e-2df0-4dbb-81b5-17618136bfa5 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2278.434030] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Start building block device mappings for instance. 
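[annotation] "Using /dev/sd instead of None" comes from device-name assignment: when no prefix is supplied, the code falls back to `/dev/sd` and takes the next free letter. A minimal sketch of that fallback, assuming a simple set of in-use names; this is not the real `get_next_device_name`:

```python
import string


def get_next_device_name(used, prefix=None):
    """Pick the next free /dev/<prefix><letter> name."""
    if prefix is None:
        prefix = '/dev/sd'
        print(f"Using {prefix} instead of None")
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names left")


assert get_next_device_name({'/dev/sda'}) == '/dev/sdb'
```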
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2278.706330] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2278.706501] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2278.757166] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.827337] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2278.827586] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2278.827749] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2278.827931] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2278.828106] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2278.828259] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2278.828468] env[62875]: DEBUG nova.virt.hardware [None 
req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2278.828631] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2278.828796] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2278.828960] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2278.829186] env[62875]: DEBUG nova.virt.hardware [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2278.834751] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2278.835048] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d0c1123-c5da-4b9c-a4ae-763aa44386f0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.854792] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2278.854792] env[62875]: value = "task-2180548" [ 2278.854792] env[62875]: _type = "Task" [ 2278.854792] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.864437] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180548, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.949394] env[62875]: INFO nova.compute.manager [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Rebuilding instance [ 2278.989929] env[62875]: DEBUG nova.compute.manager [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2278.990818] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111ec7a8-cbf8-4f3f-a4de-4bacfe48528b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.236444] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2279.236731] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2279.236731] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2279.257961] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.364308] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180548, 'name': ReconfigVM_Task, 'duration_secs': 0.243738} completed successfully. 
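[annotation] The periodic `_heal_instance_info_cache` task above takes a per-instance `refresh_cache-<uuid>` lock before forcing a network-info refresh, so it cannot race with API-driven cache updates for the same instance. A self-contained sketch of that pattern; `fetch_nw_info` and `save_cache` are hypothetical callables standing in for the Neutron query and the info-cache write:

```python
import threading

_cache_locks = {}  # hypothetical per-instance lock registry


def heal_instance_info_cache(instance_uuid, fetch_nw_info, save_cache):
    """Refresh one instance's network info under its per-instance lock."""
    name = f"refresh_cache-{instance_uuid}"
    lock = _cache_locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}"')
    with lock:  # serializes against API-driven cache updates
        nw_info = fetch_nw_info(instance_uuid)   # forceful refresh
        save_cache(instance_uuid, nw_info)
        print(f"[instance: {instance_uuid}] Updated the network info_cache")


heal_instance_info_cache(
    '9e0aaea6-96cf-494d-9f70-a709a47f9772',
    fetch_nw_info=lambda uuid: [{'id': 'c4a5613b-5345-49d7-b791-29a0dbe58ed2'}],
    save_cache=lambda uuid, info: None,
)
```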
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.364629] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2279.365408] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c94d7bd-531f-43ba-9e84-6c80524e1163 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.386593] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] f3297565-541b-4a8f-a753-419b6e953ff0/f3297565-541b-4a8f-a753-419b6e953ff0.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2279.386835] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb6b57ce-4442-4979-bb4d-6702f62379a4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.405216] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2279.405216] env[62875]: value = "task-2180549" [ 2279.405216] env[62875]: _type = "Task" [ 2279.405216] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.414570] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180549, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.444342] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2279.466831] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2279.467090] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2279.467252] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2279.467436] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2279.467586] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2279.467734] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2279.467943] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2279.468127] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2279.468297] env[62875]: DEBUG nova.virt.hardware [None 
req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2279.468462] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2279.468635] env[62875]: DEBUG nova.virt.hardware [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2279.469508] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175d1079-4e78-4d42-9a1e-9d71ecb60379 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.477410] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5898f56-4354-493e-834c-f0a16374e690 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.759885] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.914847] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180549, 'name': ReconfigVM_Task, 'duration_secs': 0.256833} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.915124] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Reconfigured VM instance instance-00000058 to attach disk [datastore1] f3297565-541b-4a8f-a753-419b6e953ff0/f3297565-541b-4a8f-a753-419b6e953ff0.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2279.915386] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance 'f3297565-541b-4a8f-a753-419b6e953ff0' progress to 50 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2280.004727] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2280.005076] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82c4f40b-ab97-486c-91c3-e1110dd43c0b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.013377] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2280.013377] env[62875]: value = "task-2180550" [ 2280.013377] env[62875]: _type = "Task" [ 2280.013377] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.021719] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180550, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.259987] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.421633] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d163a331-9836-4a36-9a47-be73a45025ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.440583] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc60d0f9-53bb-4b86-b86e-8ef6022836fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.457870] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance 'f3297565-541b-4a8f-a753-419b6e953ff0' progress to 67 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2280.494520] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updating instance_info_cache with network_info: [{"id": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "address": "fa:16:3e:8b:f8:16", "network": {"id": "86c8f999-67e2-4985-82bb-3f9c44f0fbb8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1235728423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48cead7352349dbab0d47c19e048eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a5613b-53", "ovs_interfaceid": "c4a5613b-5345-49d7-b791-29a0dbe58ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2280.522729] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180550, 'name': PowerOffVM_Task, 'duration_secs': 0.220932} completed successfully. 
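[annotation] The `update_instance_cache_with_nw_info` entry above shows the cached network model: a list of VIFs, each carrying a `network.subnets[].ips[]` tree with fixed and floating addresses. A small sketch that walks that exact structure to pull out the addresses, abbreviated to the fields it uses:

```python
# Abbreviated copy of the cached blob from the log entry above.
nw_info = [{
    "id": "c4a5613b-5345-49d7-b791-29a0dbe58ed2",
    "address": "fa:16:3e:8b:f8:16",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{
            "address": "192.168.128.7", "type": "fixed",
            "floating_ips": [{"address": "10.180.180.233",
                              "type": "floating"}],
        }],
    }]},
}]


def list_addresses(nw_info):
    """Yield (kind, address) pairs from a cached network_info blob."""
    for vif in nw_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                yield ip["type"], ip["address"]
                for fip in ip.get("floating_ips", []):
                    yield fip["type"], fip["address"]


print(list(list_addresses(nw_info)))
# [('fixed', '192.168.128.7'), ('floating', '10.180.180.233')]
```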
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.522980] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2280.523224] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2280.523974] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1fbf07-c667-4ad3-896b-1eccb97b287b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.530486] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2280.530695] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f31ea81c-0d8d-47a4-8755-0233fbaaa8d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.554244] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2280.554409] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2280.554585] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Deleting the datastore file [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2280.554810] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2f01889-def5-4298-ab70-9997df4e8ac0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.560683] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2280.560683] env[62875]: value = "task-2180552" [ 2280.560683] env[62875]: _type = "Task" [ 2280.560683] env[62875]: } to complete. 
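[annotation] The rebuild path above tears the old VM down in a fixed order: power off, unregister from vCenter, then delete the instance directory from the datastore via `DeleteDatastoreFile_Task`. A sketch of that ordering; `session` and its method names are hypothetical stand-ins for the vCenter calls named in the comments, not Nova's driver API:

```python
from types import SimpleNamespace

# Hypothetical session whose methods stand in for the vCenter tasks.
session = SimpleNamespace(
    power_off=lambda uuid: print("Powered off the VM"),
    unregister=lambda uuid: print("Unregistered the VM"),
    delete_datastore_file=lambda path: print(f"Deleting the datastore file {path}"),
)


def destroy_instance(session, instance_uuid, datastore):
    """Tear down a VM in the order this log shows."""
    session.power_off(instance_uuid)        # PowerOffVM_Task
    session.unregister(instance_uuid)       # UnregisterVM (synchronous)
    # Only once the VM is unregistered is it safe to remove its files:
    path = f"[{datastore}] {instance_uuid}"
    session.delete_datastore_file(path)     # DeleteDatastoreFile_Task
    print(f"[instance: {instance_uuid}] Instance destroyed")


destroy_instance(session, '84eca63b-87dc-4af4-a2dd-f489a06dcd58', 'datastore2')
```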
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.570663] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.760672] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.997424] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-9e0aaea6-96cf-494d-9f70-a709a47f9772" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2280.997424] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2280.998056] env[62875]: DEBUG nova.network.neutron [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Port ac461c5e-0fb6-4e69-adcc-14af13eb6dca binding to destination host cpu-1 is already ACTIVE {{(pid=62875) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2280.999350] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2281.070992] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093393} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.071257] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2281.071441] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2281.071631] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2281.260715] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.506189] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2281.506417] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2281.506582] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2281.506733] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2281.507778] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d970c06-cf93-40b5-99fc-00fa1ba8ce0b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.516111] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc10cbe-ecc3-4742-a137-93ad6ccc2dac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.529756] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f9b57a45-f9e0-42c3-9c54-7c1e5b6778a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.536175] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f573fe-bcef-4de3-848e-3058e0d0878d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.564477] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179735MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2281.564624] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2281.564821] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2281.761840] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.021545] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.021781] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2282.021957] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2282.099431] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2282.099677] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2282.099834] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2282.100025] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2282.100183] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2282.100329] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2282.100568] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2282.100735] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2282.100921] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2282.101125] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 
tempest-ServerShowV254Test-458485411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2282.101303] env[62875]: DEBUG nova.virt.hardware [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2282.102162] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edec11f-507a-4151-b4c6-7077950918ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.110478] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c575ec84-7f45-4273-832b-ba00211363ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.123533] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Instance VIF info [] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2282.129014] env[62875]: DEBUG oslo.service.loopingcall [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2282.129242] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2282.129432] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7e9222e-1e7c-4db4-97c0-99e5056cb15f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.145716] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2282.145716] env[62875]: value = "task-2180553" [ 2282.145716] env[62875]: _type = "Task" [ 2282.145716] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.152716] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180553, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.262441] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.574941] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Applying migration context for instance f3297565-541b-4a8f-a753-419b6e953ff0 as it has an incoming, in-progress migration 64ce0f6d-8e14-4f10-a806-91824902a197. 
Migration status is post-migrating {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2282.576385] env[62875]: INFO nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating resource usage from migration 64ce0f6d-8e14-4f10-a806-91824902a197 [ 2282.595250] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.595391] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9e0aaea6-96cf-494d-9f70-a709a47f9772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.595517] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.595636] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 70547fbd-7ce8-466e-8abc-b490b8dd6b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.595751] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 45403db3-ff20-42d3-8a37-8db671d8c1fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.595869] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 9b3628be-b8a3-4105-bc84-088dede23aaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.595992] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.596107] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 84eca63b-87dc-4af4-a2dd-f489a06dcd58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.596222] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Migration 64ce0f6d-8e14-4f10-a806-91824902a197 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2282.596335] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance f3297565-541b-4a8f-a753-419b6e953ff0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.596449] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.596582] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 4356506b-ff11-43bb-84be-35ea0fe90cb1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2282.596780] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2282.596914] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2282.654617] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180553, 'name': CreateVM_Task, 'duration_secs': 0.246569} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.656674] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2282.657252] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2282.657414] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.657724] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2282.657980] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bbdf4b7-9184-49dc-86e5-f057a1083ff6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.662671] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2282.662671] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52925732-bba6-50c6-ef76-630198127f96" [ 2282.662671] env[62875]: _type = "Task" [ 2282.662671] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.672728] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52925732-bba6-50c6-ef76-630198127f96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.732665] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d19845c-6335-45e7-8257-c5c9bed5ddb4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.740064] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541d779c-8b21-4e01-a9f7-ed8e005fbfa4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.773727] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d392ea77-395d-4881-9ef0-12aeec5773f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.780696] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.783551] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8070d6-497b-4100-9867-c9d97f4e75e4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.796514] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2283.073540] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2283.073737] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2283.073913] env[62875]: DEBUG nova.network.neutron [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2283.172862] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52925732-bba6-50c6-ef76-630198127f96, 'name': SearchDatastore_Task, 'duration_secs': 0.01208} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.173160] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2283.173389] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2283.173637] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2283.173801] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2283.173982] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2283.174239] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67defec3-b3cf-49b5-a47c-eb1fcb3ddcb8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.181801] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2283.181970] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2283.182646] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90fd5252-0d60-424d-a3f7-935334ea555b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.187697] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2283.187697] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e64a6f-1990-b910-f181-c3303a527701" [ 2283.187697] env[62875]: _type = "Task" [ 2283.187697] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.194827] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e64a6f-1990-b910-f181-c3303a527701, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.279316] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.299376] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2283.697643] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e64a6f-1990-b910-f181-c3303a527701, 'name': SearchDatastore_Task, 'duration_secs': 0.008753} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.698405] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82dc7c76-e941-4c82-b9d8-6b2e58fad6c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.703873] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2283.703873] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b03539-f286-c1d1-ff72-3734b0dd3128" [ 2283.703873] env[62875]: _type = "Task" [ 2283.703873] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.711030] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b03539-f286-c1d1-ff72-3734b0dd3128, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.779929] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.803784] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2283.803986] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.239s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.839948] env[62875]: DEBUG nova.network.neutron [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance_info_cache with network_info: [{"id": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "address": "fa:16:3e:9a:24:e6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac461c5e-0f", "ovs_interfaceid": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.215026] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b03539-f286-c1d1-ff72-3734b0dd3128, 'name': SearchDatastore_Task, 'duration_secs': 0.008929} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.215290] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2284.215551] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2284.215798] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59ca7130-0d64-438f-8a14-0480252c967d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.221898] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2284.221898] env[62875]: value = "task-2180554" [ 2284.221898] env[62875]: _type = "Task" [ 2284.221898] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.228705] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.281088] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.342266] env[62875]: DEBUG oslo_concurrency.lockutils [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2284.732609] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455202} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.732883] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2284.733131] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2284.733392] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03dfabdc-660f-4a2a-bcc3-42950aa28bf5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.740699] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2284.740699] env[62875]: value = "task-2180555" [ 2284.740699] env[62875]: _type = "Task" [ 2284.740699] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.748139] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180555, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.781551] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.863173] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9897ff86-7b9c-4f71-95b8-5e6a77b3371d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.883129] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8b3c79-7873-49f4-ae6b-d3194c70464c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.890486] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance 'f3297565-541b-4a8f-a753-419b6e953ff0' progress to 83 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2285.250808] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180555, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064277} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.251118] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2285.251951] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06b7cec-a0fd-473d-85ba-abcc392aae2d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.271299] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2285.271548] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08a36c72-f875-4827-aedb-a8706517ff2d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.293749] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.296396] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2285.296396] env[62875]: value = "task-2180556" [ 2285.296396] env[62875]: _type = "Task" [ 2285.296396] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.303836] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180556, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.397154] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2285.398425] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-724fbd29-e8f2-475b-9e11-d616a76f4c36 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.402149] env[62875]: DEBUG nova.compute.manager [req-e5d8b183-2c46-4a92-8550-22c8b9148f16 req-87317d0c-1ab2-4b97-8519-66d4f7e981a5 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Received event network-vif-plugged-56a3ac28-628c-4b11-a501-4f0c95923e8a {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2285.402384] env[62875]: DEBUG oslo_concurrency.lockutils [req-e5d8b183-2c46-4a92-8550-22c8b9148f16 req-87317d0c-1ab2-4b97-8519-66d4f7e981a5 service nova] Acquiring lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2285.402631] env[62875]: DEBUG oslo_concurrency.lockutils [req-e5d8b183-2c46-4a92-8550-22c8b9148f16 req-87317d0c-1ab2-4b97-8519-66d4f7e981a5 service nova] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2285.402986] env[62875]: DEBUG oslo_concurrency.lockutils [req-e5d8b183-2c46-4a92-8550-22c8b9148f16 req-87317d0c-1ab2-4b97-8519-66d4f7e981a5 service nova] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2285.403243] env[62875]: DEBUG nova.compute.manager [req-e5d8b183-2c46-4a92-8550-22c8b9148f16 req-87317d0c-1ab2-4b97-8519-66d4f7e981a5 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] No waiting events found dispatching network-vif-plugged-56a3ac28-628c-4b11-a501-4f0c95923e8a {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2285.403433] env[62875]: WARNING nova.compute.manager 
[req-e5d8b183-2c46-4a92-8550-22c8b9148f16 req-87317d0c-1ab2-4b97-8519-66d4f7e981a5 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Received unexpected event network-vif-plugged-56a3ac28-628c-4b11-a501-4f0c95923e8a for instance with vm_state building and task_state spawning.
[ 2285.410380] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){
[ 2285.410380] env[62875]: value = "task-2180557"
[ 2285.410380] env[62875]: _type = "Task"
[ 2285.410380] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2285.419019] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2285.488990] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Successfully updated port: 56a3ac28-628c-4b11-a501-4f0c95923e8a {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2285.778921] env[62875]: DEBUG nova.compute.manager [req-927c70a2-5bd2-44ac-bb4a-04c252edf1eb req-f911b859-5623-4d2e-8fb5-9585a14b7c5d service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Received event network-vif-plugged-84ebcc4e-2df0-4dbb-81b5-17618136bfa5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2285.779212] env[62875]: DEBUG oslo_concurrency.lockutils [req-927c70a2-5bd2-44ac-bb4a-04c252edf1eb req-f911b859-5623-4d2e-8fb5-9585a14b7c5d service nova] Acquiring lock "4356506b-ff11-43bb-84be-35ea0fe90cb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2285.779689] env[62875]: DEBUG oslo_concurrency.lockutils [req-927c70a2-5bd2-44ac-bb4a-04c252edf1eb req-f911b859-5623-4d2e-8fb5-9585a14b7c5d service nova] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2285.779689] env[62875]: DEBUG oslo_concurrency.lockutils [req-927c70a2-5bd2-44ac-bb4a-04c252edf1eb req-f911b859-5623-4d2e-8fb5-9585a14b7c5d service nova] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2285.779797] env[62875]: DEBUG nova.compute.manager [req-927c70a2-5bd2-44ac-bb4a-04c252edf1eb req-f911b859-5623-4d2e-8fb5-9585a14b7c5d service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] No waiting events found dispatching network-vif-plugged-84ebcc4e-2df0-4dbb-81b5-17618136bfa5 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2285.779966] env[62875]: WARNING nova.compute.manager [req-927c70a2-5bd2-44ac-bb4a-04c252edf1eb req-f911b859-5623-4d2e-8fb5-9585a14b7c5d service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Received unexpected event network-vif-plugged-84ebcc4e-2df0-4dbb-81b5-17618136bfa5 for instance with vm_state building and task_state spawning.
[ 2285.798279] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180541, 'name': CloneVM_Task, 'duration_secs': 10.5723} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2285.802548] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Created linked-clone VM from snapshot
[ 2285.803356] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24d2236-5e6b-46e0-befa-7a0412936e34 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2285.811726] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180556, 'name': ReconfigVM_Task, 'duration_secs': 0.378173} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2285.815055] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58/84eca63b-87dc-4af4-a2dd-f489a06dcd58.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 2285.815703] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Uploading image fe663fcf-69b6-4a2c-9621-48948b825272 {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}}
[ 2285.819096] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6c912e1-4971-4f19-8ffe-f2e5e8372bbc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2285.824472] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){
[ 2285.824472] env[62875]: value = "task-2180558"
[ 2285.824472] env[62875]: _type = "Task"
[ 2285.824472] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2285.833435] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180558, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2285.846131] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Creating HttpNfcLease lease for exporting VM: (result){
[ 2285.846131] env[62875]: value = "vm-444991"
[ 2285.846131] env[62875]: _type = "VirtualMachine"
[ 2285.846131] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}}
[ 2285.846131] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-adc1e19e-d87f-4c8f-a9ae-029cc2fade0f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2285.853809] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lease: (returnval){
[ 2285.853809] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529fcaf1-f321-82be-cc79-0fbee1ae4af7"
[ 2285.853809] env[62875]: _type = "HttpNfcLease"
[ 2285.853809] env[62875]: } obtained for exporting VM: (result){
[ 2285.853809] env[62875]: value = "vm-444991"
[ 2285.853809] env[62875]: _type = "VirtualMachine"
[ 2285.853809] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}}
[ 2285.854671] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the lease: (returnval){
[ 2285.854671] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529fcaf1-f321-82be-cc79-0fbee1ae4af7"
[ 2285.854671] env[62875]: _type = "HttpNfcLease"
[ 2285.854671] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}}
[ 2285.861276] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 2285.861276] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529fcaf1-f321-82be-cc79-0fbee1ae4af7"
[ 2285.861276] env[62875]: _type = "HttpNfcLease"
[ 2285.861276] env[62875]: } is initializing. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}}
[ 2285.881890] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Successfully updated port: 84ebcc4e-2df0-4dbb-81b5-17618136bfa5 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2285.920314] env[62875]: DEBUG oslo_vmware.api [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180557, 'name': PowerOnVM_Task, 'duration_secs': 0.436551} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2285.920823] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2285.922024] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-cb992880-3e9b-4ef7-a8d4-d9c893ffe805 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance 'f3297565-541b-4a8f-a753-419b6e953ff0' progress to 100 {{(pid=62875) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 2285.991469] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "refresh_cache-75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2285.991661] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "refresh_cache-75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2285.991787] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2286.335109] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180558, 'name': Rename_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2286.361876] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 2286.361876] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529fcaf1-f321-82be-cc79-0fbee1ae4af7"
[ 2286.361876] env[62875]: _type = "HttpNfcLease"
[ 2286.361876] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}}
[ 2286.362214] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Invoking VIM API for reading info of lease: (returnval){
[ 2286.362214] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529fcaf1-f321-82be-cc79-0fbee1ae4af7"
[ 2286.362214] env[62875]: _type = "HttpNfcLease"
[ 2286.362214] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}}
[ 2286.362909] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6de4b9-2b25-48f1-ba54-a14767e84685 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2286.369865] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7658d-64b0-d971-44fe-37f05cf42a64/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}}
[ 2286.370064] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7658d-64b0-d971-44fe-37f05cf42a64/disk-0.vmdk for reading. {{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}}
[ 2286.425448] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "refresh_cache-4356506b-ff11-43bb-84be-35ea0fe90cb1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2286.425583] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "refresh_cache-4356506b-ff11-43bb-84be-35ea0fe90cb1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2286.425734] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2286.456585] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e8361d5c-20de-45d4-9bc8-17ae6a0e7fa2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2286.531690] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2286.655579] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Updating instance_info_cache with network_info: [{"id": "56a3ac28-628c-4b11-a501-4f0c95923e8a", "address": "fa:16:3e:30:16:b9", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56a3ac28-62", "ovs_interfaceid": "56a3ac28-628c-4b11-a501-4f0c95923e8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2286.835615] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180558, 'name': Rename_Task, 'duration_secs': 0.538254} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2286.835976] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 2286.836785] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fd1394b-c902-47e6-9ce9-aca2342a7d0d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2286.843170] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){
[ 2286.843170] env[62875]: value = "task-2180560"
[ 2286.843170] env[62875]: _type = "Task"
[ 2286.843170] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2286.851012] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180560, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2286.965755] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2287.158977] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "refresh_cache-75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2287.159339] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Instance network_info: |[{"id": "56a3ac28-628c-4b11-a501-4f0c95923e8a", "address": "fa:16:3e:30:16:b9", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56a3ac28-62", "ovs_interfaceid": "56a3ac28-628c-4b11-a501-4f0c95923e8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2287.159799] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:16:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56a3ac28-628c-4b11-a501-4f0c95923e8a', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2287.169217] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Creating folder: Project (d4f66deaad8e44f8b352e02a74ca6613). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2287.170768] env[62875]: DEBUG nova.network.neutron [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Updating instance_info_cache with network_info: [{"id": "84ebcc4e-2df0-4dbb-81b5-17618136bfa5", "address": "fa:16:3e:ee:1d:bc", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ebcc4e-2d", "ovs_interfaceid": "84ebcc4e-2df0-4dbb-81b5-17618136bfa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2287.172215] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-167f3006-ecb7-4093-9d14-fbe6f67a5f46 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2287.184481] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Created folder: Project (d4f66deaad8e44f8b352e02a74ca6613) in parent group-v444854.
[ 2287.184693] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Creating folder: Instances. Parent ref: group-v444993. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2287.185057] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e770820-5820-468b-adbc-879561463350 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2287.195478] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Created folder: Instances in parent group-v444993.
[ 2287.195773] env[62875]: DEBUG oslo.service.loopingcall [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2287.195995] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2287.196354] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d09c72d-8938-40db-bd5e-2f060ab8c86d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2287.218492] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2287.218492] env[62875]: value = "task-2180563"
[ 2287.218492] env[62875]: _type = "Task"
[ 2287.218492] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2287.226651] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180563, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2287.353892] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180560, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2287.433011] env[62875]: DEBUG nova.compute.manager [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Received event network-changed-56a3ac28-628c-4b11-a501-4f0c95923e8a {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2287.433299] env[62875]: DEBUG nova.compute.manager [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Refreshing instance network info cache due to event network-changed-56a3ac28-628c-4b11-a501-4f0c95923e8a. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2287.433588] env[62875]: DEBUG oslo_concurrency.lockutils [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] Acquiring lock "refresh_cache-75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2287.433735] env[62875]: DEBUG oslo_concurrency.lockutils [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] Acquired lock "refresh_cache-75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2287.433933] env[62875]: DEBUG nova.network.neutron [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Refreshing network info cache for port 56a3ac28-628c-4b11-a501-4f0c95923e8a {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2287.675956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "refresh_cache-4356506b-ff11-43bb-84be-35ea0fe90cb1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2287.676301] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Instance network_info: |[{"id": "84ebcc4e-2df0-4dbb-81b5-17618136bfa5", "address": "fa:16:3e:ee:1d:bc", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ebcc4e-2d", "ovs_interfaceid": "84ebcc4e-2df0-4dbb-81b5-17618136bfa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2287.676768] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:1d:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84ebcc4e-2df0-4dbb-81b5-17618136bfa5', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2287.684641] env[62875]: DEBUG oslo.service.loopingcall [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2287.684934] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2287.685319] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd6ff22f-db81-4e20-a54f-c9ae807d130b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2287.705264] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2287.705264] env[62875]: value = "task-2180564"
[ 2287.705264] env[62875]: _type = "Task"
[ 2287.705264] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2287.715020] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180564, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2287.727983] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180563, 'name': CreateVM_Task, 'duration_secs': 0.469954} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2287.728262] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 2287.729084] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2287.729456] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2287.729998] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2287.730352] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daca5407-76a7-49a6-931d-2ce9f122a11d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2287.735430] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){
[ 2287.735430] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522d620b-6c0f-9dee-0cf8-ebbbf070073e"
[ 2287.735430] env[62875]: _type = "Task"
[ 2287.735430] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2287.744067] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522d620b-6c0f-9dee-0cf8-ebbbf070073e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2287.813183] env[62875]: DEBUG nova.compute.manager [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Received event network-changed-84ebcc4e-2df0-4dbb-81b5-17618136bfa5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2287.813345] env[62875]: DEBUG nova.compute.manager [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Refreshing instance network info cache due to event network-changed-84ebcc4e-2df0-4dbb-81b5-17618136bfa5. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2287.813688] env[62875]: DEBUG oslo_concurrency.lockutils [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] Acquiring lock "refresh_cache-4356506b-ff11-43bb-84be-35ea0fe90cb1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2287.813890] env[62875]: DEBUG oslo_concurrency.lockutils [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] Acquired lock "refresh_cache-4356506b-ff11-43bb-84be-35ea0fe90cb1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2287.814181] env[62875]: DEBUG nova.network.neutron [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Refreshing network info cache for port 84ebcc4e-2df0-4dbb-81b5-17618136bfa5 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2287.854407] env[62875]: DEBUG oslo_vmware.api [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180560, 'name': PowerOnVM_Task, 'duration_secs': 0.687459} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2287.854710] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 2287.854924] env[62875]: DEBUG nova.compute.manager [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2287.855812] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330c0fe0-1265-4e7e-8922-1d07b59ace85 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2288.159124] env[62875]: DEBUG nova.network.neutron [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Updated VIF entry in instance network info cache for port 56a3ac28-628c-4b11-a501-4f0c95923e8a. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 2288.159679] env[62875]: DEBUG nova.network.neutron [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Updating instance_info_cache with network_info: [{"id": "56a3ac28-628c-4b11-a501-4f0c95923e8a", "address": "fa:16:3e:30:16:b9", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56a3ac28-62", "ovs_interfaceid": "56a3ac28-628c-4b11-a501-4f0c95923e8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2288.215788] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180564, 'name': CreateVM_Task, 'duration_secs': 0.467506} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2288.215977] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 2288.216660] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2288.246406] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522d620b-6c0f-9dee-0cf8-ebbbf070073e, 'name': SearchDatastore_Task, 'duration_secs': 0.011808} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2288.246406] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2288.246600] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2288.246830] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2288.246977] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2288.247171] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2288.247453] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2288.247758] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2288.247982] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69a63541-36c8-48ac-9b6d-84d8d76b7be8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2288.249856] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50172221-c9b0-4676-812e-55af22c644aa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2288.256390] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){
[ 2288.256390] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5258825a-6b4c-ff0f-ce31-069b44dbaf21"
[ 2288.256390] env[62875]: _type = "Task"
[ 2288.256390] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2288.260319] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2288.260498] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 2288.261614] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d8d621-ff20-4b26-b787-c6ac83e500fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2288.266782] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5258825a-6b4c-ff0f-ce31-069b44dbaf21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2288.269991] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){
[ 2288.269991] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c8b0e7-f3c5-2ec8-9fa1-3147edf5a5c8"
[ 2288.269991] env[62875]: _type = "Task"
[ 2288.269991] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2288.278256] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c8b0e7-f3c5-2ec8-9fa1-3147edf5a5c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2288.377026] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2288.377026] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2288.377026] env[62875]: DEBUG nova.objects.instance [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62875) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}}
[ 2288.512729] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "f3297565-541b-4a8f-a753-419b6e953ff0" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2288.513031] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2288.513218] env[62875]: DEBUG nova.compute.manager [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Going to confirm migration 3 {{(pid=62875) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}}
[ 2288.530295] env[62875]: DEBUG nova.network.neutron [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Updated VIF entry in instance network info cache for port 84ebcc4e-2df0-4dbb-81b5-17618136bfa5. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 2288.530659] env[62875]: DEBUG nova.network.neutron [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Updating instance_info_cache with network_info: [{"id": "84ebcc4e-2df0-4dbb-81b5-17618136bfa5", "address": "fa:16:3e:ee:1d:bc", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ebcc4e-2d", "ovs_interfaceid": "84ebcc4e-2df0-4dbb-81b5-17618136bfa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2288.662635] env[62875]: DEBUG oslo_concurrency.lockutils [req-8e120c3c-28e8-4b96-9c29-5c7d96343e4a req-ab427b07-57b0-466f-8bb0-873f36b67d98 service nova] Releasing lock "refresh_cache-75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2288.679956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2288.680272] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2288.680492] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2288.680724] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2288.680913] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2288.683127] env[62875]: INFO nova.compute.manager [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Terminating instance
[ 2288.767336] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5258825a-6b4c-ff0f-ce31-069b44dbaf21, 'name': SearchDatastore_Task, 'duration_secs': 0.010393} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2288.767643] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2288.767877] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2288.768101] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2288.779837] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c8b0e7-f3c5-2ec8-9fa1-3147edf5a5c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009234} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2288.780731] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef1cd6ea-f2f6-4f00-82d8-d68109a849e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2288.786489] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){
[ 2288.786489] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5218842c-487c-6936-e28a-fff5e5759959"
[ 2288.786489] env[62875]: _type = "Task"
[ 2288.786489] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2288.794556] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5218842c-487c-6936-e28a-fff5e5759959, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2289.033871] env[62875]: DEBUG oslo_concurrency.lockutils [req-49d89999-1fe6-4b08-83af-101e4f935701 req-a9637141-f445-414d-8638-adac5342da26 service nova] Releasing lock "refresh_cache-4356506b-ff11-43bb-84be-35ea0fe90cb1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2289.072844] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2289.073114] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquired lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2289.073333] env[62875]: DEBUG nova.network.neutron [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2289.073507] env[62875]: DEBUG nova.objects.instance [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'info_cache' on Instance uuid f3297565-541b-4a8f-a753-419b6e953ff0 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2289.187342] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "refresh_cache-84eca63b-87dc-4af4-a2dd-f489a06dcd58" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2289.187543] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquired lock "refresh_cache-84eca63b-87dc-4af4-a2dd-f489a06dcd58" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2289.187706] env[62875]: DEBUG nova.network.neutron [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2289.297530] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5218842c-487c-6936-e28a-fff5e5759959, 'name': SearchDatastore_Task, 'duration_secs': 0.015854} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2289.297796] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2289.298064] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3/75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 2289.298349] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2289.298527] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2289.298760] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea4d66fc-01ee-4bbc-b690-416287363b35 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2289.300737] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be51c367-1cc5-4963-bd30-f2cf26a426b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2289.309360] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){
[ 2289.309360] env[62875]: value = "task-2180565"
[ 2289.309360] env[62875]: _type = "Task"
[ 2289.309360] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2289.310606] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2289.310788] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 2289.314328] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-997fd321-fdb2-47b8-ab8b-4d45d535f4ae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2289.322534] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2289.323797] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){
[ 2289.323797] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525afc78-c5cc-bba4-2dfb-03abc58745d1"
[ 2289.323797] env[62875]: _type = "Task"
[ 2289.323797] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2289.331897] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525afc78-c5cc-bba4-2dfb-03abc58745d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2289.386673] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b4651266-b272-4650-ab7f-987fc389ea8e tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2289.707867] env[62875]: DEBUG nova.network.neutron [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2289.761486] env[62875]: DEBUG nova.network.neutron [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2289.820902] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180565, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2289.835105] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525afc78-c5cc-bba4-2dfb-03abc58745d1, 'name': SearchDatastore_Task, 'duration_secs': 0.025547} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2289.835979] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fa6b498-a550-4fb8-8828-897ea3a93d04 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2289.841444] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){
[ 2289.841444] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52063e36-c88f-c4e0-cca7-2cfc7423890a"
[ 2289.841444] env[62875]: _type = "Task"
[ 2289.841444] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2289.849192] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52063e36-c88f-c4e0-cca7-2cfc7423890a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2290.264330] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Releasing lock "refresh_cache-84eca63b-87dc-4af4-a2dd-f489a06dcd58" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2290.264765] env[62875]: DEBUG nova.compute.manager [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Start destroying the instance on the hypervisor.
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2290.265070] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2290.266492] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108edcab-f1ce-407d-97c3-ba9412e362d0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.274882] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2290.275150] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-184cf587-26fd-4b67-9cde-d5805c786abb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.281634] env[62875]: DEBUG oslo_vmware.api [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2290.281634] env[62875]: value = "task-2180566" [ 2290.281634] env[62875]: _type = "Task" [ 2290.281634] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.290945] env[62875]: DEBUG oslo_vmware.api [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180566, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.321220] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551771} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.321665] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3/75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2290.321817] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2290.321953] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-263960c6-c491-4014-946c-59066a9821fb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.328681] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2290.328681] env[62875]: value = "task-2180567" [ 2290.328681] env[62875]: _type = "Task" [ 2290.328681] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.336411] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180567, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.351594] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52063e36-c88f-c4e0-cca7-2cfc7423890a, 'name': SearchDatastore_Task, 'duration_secs': 0.01038} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.351843] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2290.352129] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 4356506b-ff11-43bb-84be-35ea0fe90cb1/4356506b-ff11-43bb-84be-35ea0fe90cb1.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2290.352388] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77d008ec-e00f-4bdb-aa2a-696c0ac2783a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.359369] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2290.359369] env[62875]: value = "task-2180568" [ 2290.359369] env[62875]: _type = "Task" [ 2290.359369] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.367493] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180568, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.562235] env[62875]: DEBUG nova.network.neutron [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance_info_cache with network_info: [{"id": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "address": "fa:16:3e:9a:24:e6", "network": {"id": "74213996-bea1-4427-a511-fa492e32b378", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432642063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5062c761ea34842a2f6179ae76f3465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac461c5e-0f", "ovs_interfaceid": "ac461c5e-0fb6-4e69-adcc-14af13eb6dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2290.792918] env[62875]: DEBUG oslo_vmware.api [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180566, 'name': PowerOffVM_Task, 'duration_secs': 0.15304} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.793217] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2290.793391] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2290.793655] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13b0c893-8ed4-40c4-8287-7f9371e58b58 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.817434] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2290.817712] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2290.817834] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Deleting the datastore file [datastore2] 84eca63b-87dc-4af4-a2dd-f489a06dcd58 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2290.818204] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fd95c76-1366-4f7b-9292-f741dbf1d722 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.827817] env[62875]: DEBUG oslo_vmware.api [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for the task: (returnval){ [ 2290.827817] env[62875]: value = "task-2180570" [ 2290.827817] env[62875]: _type = "Task" [ 2290.827817] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.841914] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076709} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.845134] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2290.845471] env[62875]: DEBUG oslo_vmware.api [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.846213] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9baa8c8-b070-45a8-b810-802d81fc116a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.868520] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3/75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2290.871891] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e58c16b-6d48-4ecc-8f02-f3914538705a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.892072] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180568, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526245} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.893352] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 4356506b-ff11-43bb-84be-35ea0fe90cb1/4356506b-ff11-43bb-84be-35ea0fe90cb1.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2290.893610] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2290.893907] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2290.893907] env[62875]: value = "task-2180571" [ 2290.893907] env[62875]: _type = "Task" [ 2290.893907] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.894120] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed1fd958-3169-4f5b-a169-ec8c3f2d3219 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.904152] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180571, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.905312] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2290.905312] env[62875]: value = "task-2180572" [ 2290.905312] env[62875]: _type = "Task" [ 2290.905312] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.912406] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180572, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.065717] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Releasing lock "refresh_cache-f3297565-541b-4a8f-a753-419b6e953ff0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2291.066041] env[62875]: DEBUG nova.objects.instance [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lazy-loading 'migration_context' on Instance uuid f3297565-541b-4a8f-a753-419b6e953ff0 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2291.338310] env[62875]: DEBUG oslo_vmware.api [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Task: {'id': task-2180570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172413} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.338570] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2291.338691] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2291.338871] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2291.339058] env[62875]: INFO nova.compute.manager [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Took 1.07 seconds to destroy the instance on the hypervisor. [ 2291.339306] env[62875]: DEBUG oslo.service.loopingcall [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2291.339497] env[62875]: DEBUG nova.compute.manager [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2291.339594] env[62875]: DEBUG nova.network.neutron [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2291.354865] env[62875]: DEBUG nova.network.neutron [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2291.406818] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180571, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.414699] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082074} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.414959] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2291.415745] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ec6213-45b9-4fb5-a9af-0e8fd9de290b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.439069] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 4356506b-ff11-43bb-84be-35ea0fe90cb1/4356506b-ff11-43bb-84be-35ea0fe90cb1.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2291.439446] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5adabb1a-443a-4ad2-8cdc-61a7f30065cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.459460] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2291.459460] env[62875]: value = "task-2180573" [ 2291.459460] env[62875]: _type = "Task" [ 2291.459460] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.468887] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180573, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.569179] env[62875]: DEBUG nova.objects.base [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2291.569788] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bf04f8-71db-4631-80bd-e4afdb668396 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.589543] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbdddcf4-0d09-45a2-9d64-5484cb43da3c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.595314] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2291.595314] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525e5ef9-b006-2ddb-9744-74fe21d9715d" [ 2291.595314] env[62875]: _type = "Task" [ 2291.595314] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.603842] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525e5ef9-b006-2ddb-9744-74fe21d9715d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.857942] env[62875]: DEBUG nova.network.neutron [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2291.905984] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180571, 'name': ReconfigVM_Task, 'duration_secs': 0.528112} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.906271] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3/75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2291.906903] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7478a1a-df62-4b06-9f5b-c6cf4fb4f646 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.914382] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2291.914382] env[62875]: value = "task-2180574" [ 2291.914382] env[62875]: _type = "Task" [ 2291.914382] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.922629] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180574, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.969187] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180573, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.105869] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525e5ef9-b006-2ddb-9744-74fe21d9715d, 'name': SearchDatastore_Task, 'duration_secs': 0.0081} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.106159] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.106396] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.359428] env[62875]: INFO nova.compute.manager [-] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Took 1.02 seconds to deallocate network for instance. [ 2292.424114] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180574, 'name': Rename_Task, 'duration_secs': 0.299125} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.424425] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2292.424676] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5237d9e-4d2d-44f2-9981-2a3ad806b522 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.431774] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2292.431774] env[62875]: value = "task-2180575" [ 2292.431774] env[62875]: _type = "Task" [ 2292.431774] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.439922] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180575, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.469751] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180573, 'name': ReconfigVM_Task, 'duration_secs': 0.5213} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.470092] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 4356506b-ff11-43bb-84be-35ea0fe90cb1/4356506b-ff11-43bb-84be-35ea0fe90cb1.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2292.470794] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29e3ea8c-1679-4ced-afbc-daa54daa5e87 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.477303] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2292.477303] env[62875]: value = "task-2180576" [ 2292.477303] env[62875]: _type = "Task" [ 2292.477303] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.487621] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180576, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.780917] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef5f95c-86e1-4f45-bcfb-654eacb1fe43 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.788596] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ecc827-7bf0-450a-9716-ee4d6bfae108 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.819845] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42cb988-8909-4609-a9aa-d9a9f766b628 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.827528] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ddcbf3-9cbe-43b8-8b9a-b7c4e731681d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.841190] env[62875]: DEBUG nova.compute.provider_tree [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2292.866947] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.942791] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180575, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.987784] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180576, 'name': Rename_Task, 'duration_secs': 0.230643} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.988053] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2292.988310] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9013bfad-a151-4586-b6c0-12e6c85b55b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.995466] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2292.995466] env[62875]: value = "task-2180577" [ 2292.995466] env[62875]: _type = "Task" [ 2292.995466] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.003731] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180577, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.345018] env[62875]: DEBUG nova.scheduler.client.report [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2293.442932] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180575, 'name': PowerOnVM_Task, 'duration_secs': 0.708393} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.443231] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2293.443438] env[62875]: INFO nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Took 16.23 seconds to spawn the instance on the hypervisor. [ 2293.443623] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2293.444435] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccb60f2-728c-4b06-b3fa-2b39f6c9a8e1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.505017] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180577, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.875272] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7658d-64b0-d971-44fe-37f05cf42a64/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2293.876271] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fb7e0a-8d83-425a-bcf9-b6ea027382e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.882703] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7658d-64b0-d971-44fe-37f05cf42a64/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2293.882876] env[62875]: ERROR oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7658d-64b0-d971-44fe-37f05cf42a64/disk-0.vmdk due to incomplete transfer. 
[ 2293.883113] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a53488df-ee86-42fd-becf-56ff3c195f9a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.891447] env[62875]: DEBUG oslo_vmware.rw_handles [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7658d-64b0-d971-44fe-37f05cf42a64/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2293.891644] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Uploaded image fe663fcf-69b6-4a2c-9621-48948b825272 to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2293.893995] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2293.894257] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f3055eac-7d97-4792-8831-ef3f2e6d9084 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.899682] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2293.899682] env[62875]: value = "task-2180578" [ 2293.899682] env[62875]: _type = "Task" [ 2293.899682] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.906839] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180578, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.962163] env[62875]: INFO nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Took 21.58 seconds to build instance. [ 2294.008716] env[62875]: DEBUG oslo_vmware.api [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180577, 'name': PowerOnVM_Task, 'duration_secs': 0.646478} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.009118] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2294.009401] env[62875]: INFO nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Took 14.56 seconds to spawn the instance on the hypervisor. [ 2294.009663] env[62875]: DEBUG nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2294.011109] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94953a3-02cb-42e9-a4fc-d3db33260098 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.355380] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.249s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2294.358481] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.492s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2294.358714] env[62875]: DEBUG nova.objects.instance [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lazy-loading 'resources' on Instance uuid 84eca63b-87dc-4af4-a2dd-f489a06dcd58 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2294.411081] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180578, 'name': Destroy_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.463938] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.100s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2294.532395] env[62875]: INFO nova.compute.manager [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Took 22.13 seconds to build instance. [ 2294.914042] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180578, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.915301] env[62875]: INFO nova.scheduler.client.report [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted allocation for migration 64ce0f6d-8e14-4f10-a806-91824902a197 [ 2295.019374] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c8348d-d123-4536-9c2e-849db789befd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.027287] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d86379-11c3-48d5-96c9-47d11072defd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.798728] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5a5b38a-487a-4b2b-8aea-f104dd18f70e tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.403s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.800223] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 7.287s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.806422] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8350ed4-4a29-407b-94d2-42836fc3e895 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.819400] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180578, 'name': Destroy_Task, 'duration_secs': 1.476089} completed
successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2295.821322] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Destroyed the VM [ 2295.821558] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2295.822510] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2e12ac94-4c98-441b-8e47-0cc846fa1661 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.825058] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f2e452-83ca-4f4e-acd0-fc42dea018ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.839816] env[62875]: DEBUG nova.compute.provider_tree [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2295.845043] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2295.845043] env[62875]: value = "task-2180579" [ 2295.845043] env[62875]: _type = "Task" [ 2295.845043] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2295.851526] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180579, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.314877] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.315375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.315546] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.315759] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.315942] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.317614] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "4356506b-ff11-43bb-84be-35ea0fe90cb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.317826] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.317998] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock 
"4356506b-ff11-43bb-84be-35ea0fe90cb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.318189] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.318366] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.320128] env[62875]: INFO nova.compute.manager [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Terminating instance [ 2296.321487] env[62875]: INFO nova.compute.manager [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Terminating instance [ 2296.343678] env[62875]: DEBUG nova.scheduler.client.report [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2296.356188] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180579, 'name': RemoveSnapshot_Task} progress is 56%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.825428] env[62875]: DEBUG nova.compute.manager [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2296.825632] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2296.826256] env[62875]: DEBUG nova.compute.manager [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2296.826437] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2296.827288] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250c8455-34ca-4a64-8a70-b6e24cd5fba1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.831970] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f103333d-d70e-461c-9825-4521e7176b21 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.839512] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2296.841087] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54d8572f-c703-4d80-8a02-ee9fa11b8ae6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.842404] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2296.842608] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae94ccdd-5a1b-4d60-8598-8f98e141355f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.847846] env[62875]: DEBUG oslo_vmware.api [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2296.847846] env[62875]: value = "task-2180580" [ 2296.847846] env[62875]: _type = "Task" [ 2296.847846] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2296.851596] env[62875]: DEBUG oslo_vmware.api [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2296.851596] env[62875]: value = "task-2180581" [ 2296.851596] env[62875]: _type = "Task" [ 2296.851596] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2296.852250] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.494s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.863474] env[62875]: DEBUG oslo_vmware.api [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180579, 'name': RemoveSnapshot_Task, 'duration_secs': 0.722666} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2296.869388] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2296.869634] env[62875]: INFO nova.compute.manager [None req-b97bc392-d8a8-491e-a63b-713ab0e8709d tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Took 23.70 seconds to snapshot the instance on the hypervisor. [ 2296.871999] env[62875]: DEBUG oslo_vmware.api [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180581, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.872229] env[62875]: DEBUG oslo_vmware.api [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180580, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.873812] env[62875]: INFO nova.scheduler.client.report [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Deleted allocations for instance 84eca63b-87dc-4af4-a2dd-f489a06dcd58 [ 2297.036495] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "f3297565-541b-4a8f-a753-419b6e953ff0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2297.036768] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2297.036987] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2297.037187] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2297.037358] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.039652] env[62875]: INFO nova.compute.manager [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Terminating instance [ 2297.363062] env[62875]: DEBUG oslo_vmware.api [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180580, 'name': PowerOffVM_Task, 'duration_secs': 0.229123} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.363620] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2297.363791] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2297.364025] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-833c0a91-8f0c-46fd-a0f6-86a6095a813e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.367909] env[62875]: DEBUG oslo_vmware.api [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180581, 'name': PowerOffVM_Task, 'duration_secs': 0.227679} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.368419] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2297.368587] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2297.368802] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-223f6592-af4a-4056-9c21-1cb62bcded1d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.380748] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b678a790-d836-4488-8af1-42c1aed752f6 tempest-ServerShowV254Test-458485411 tempest-ServerShowV254Test-458485411-project-member] Lock "84eca63b-87dc-4af4-a2dd-f489a06dcd58" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.700s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.543955] env[62875]: DEBUG nova.compute.manager [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2297.544141] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2297.545044] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe1ad7a-5175-404a-a383-61e95ef64f60 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.553279] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2297.553531] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ea8a767-eabc-4f26-9fcb-eaa321155bb3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.560122] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2297.560122] env[62875]: value = "task-2180584" [ 2297.560122] env[62875]: _type = "Task" [ 2297.560122] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.568460] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2298.070606] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180584, 'name': PowerOffVM_Task, 'duration_secs': 0.188256} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2298.070886] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2298.071073] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2298.071316] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6398adb-1a88-408b-b99d-2f3d7bad4061 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.935033] env[62875]: DEBUG nova.compute.manager [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2298.935033] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71af41a6-bc04-43b3-994e-df5cc2d3ce08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.446056] env[62875]: INFO nova.compute.manager [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] instance snapshotting [ 2299.451121] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515a4f80-0e05-4df9-a90f-22a95eef13af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.468508] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695f2a58-a29e-45a2-b189-e094b4b019c4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.983554] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2299.983554] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-57349ca1-b519-434e-b5b8-76fef2926554 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.991848] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2299.991848] env[62875]: value = "task-2180586" [ 2299.991848] env[62875]: _type = "Task" [ 2299.991848] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2300.003446] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180586, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2300.508454] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180586, 'name': CreateSnapshot_Task, 'duration_secs': 0.444079} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2300.508728] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2300.509511] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a882c07-0949-4976-bc94-99e9254b3bbc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.030622] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2301.030622] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b0ca86fc-5b01-464e-b456-45329f7ff31c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.036647] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2301.036647] env[62875]: value = "task-2180587" [ 2301.036647] env[62875]: _type = "Task" [ 2301.036647] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2301.044544] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2301.550084] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2302.053269] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2302.551299] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2302.634026] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2302.634247] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2303.052571] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.136244] env[62875]: DEBUG nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2303.552187] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.665101] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2303.665101] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2303.665101] env[62875]: INFO nova.compute.claims [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2304.053045] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.309215] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2304.309605] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2304.309759] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleting the datastore file [datastore1] 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2304.310036] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb6e5d0a-3c7a-4725-af9e-f94f3e1b8d08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.314538] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2304.314728] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e47ce066-6538-4f19-940b-a4a034cc1089 
tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2304.314904] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleting the datastore file [datastore1] 4356506b-ff11-43bb-84be-35ea0fe90cb1 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2304.315163] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f83ace5-bb18-40da-a289-3e227ec60ca0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.317868] env[62875]: DEBUG oslo_vmware.api [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2304.317868] env[62875]: value = "task-2180588" [ 2304.317868] env[62875]: _type = "Task" [ 2304.317868] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.321940] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2304.322146] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2304.322319] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleting the datastore file [datastore1] f3297565-541b-4a8f-a753-419b6e953ff0 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2304.322914] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f5c6060-9576-45b9-8bd7-53dde3e30686 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.325599] env[62875]: DEBUG oslo_vmware.api [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2304.325599] env[62875]: value = "task-2180589" [ 2304.325599] env[62875]: _type = "Task" [ 2304.325599] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.328667] env[62875]: DEBUG oslo_vmware.api [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180588, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.332173] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for the task: (returnval){ [ 2304.332173] env[62875]: value = "task-2180590" [ 2304.332173] env[62875]: _type = "Task" [ 2304.332173] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.338385] env[62875]: DEBUG oslo_vmware.api [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.341068] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.554095] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.818931] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7559afef-1bc9-470b-951e-1846e75b3744 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.831716] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091acc62-7fb3-4eb3-ba89-69adf67911b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.834944] env[62875]: DEBUG oslo_vmware.api [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132738} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.837950] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2304.838245] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2304.838567] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2304.838824] env[62875]: INFO nova.compute.manager [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Took 8.01 seconds to destroy the instance on the hypervisor. [ 2304.839103] env[62875]: DEBUG oslo.service.loopingcall [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2304.843729] env[62875]: DEBUG nova.compute.manager [-] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2304.843804] env[62875]: DEBUG nova.network.neutron [-] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2304.874867] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086f372c-0a7e-4407-8dc2-c252dd533b68 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.877446] env[62875]: DEBUG oslo_vmware.api [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141162} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.877670] env[62875]: DEBUG oslo_vmware.api [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Task: {'id': task-2180590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142304} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.877900] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2304.878090] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2304.878267] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2304.878439] env[62875]: INFO nova.compute.manager [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Took 8.05 seconds to destroy the instance on the hypervisor. [ 2304.878669] env[62875]: DEBUG oslo.service.loopingcall [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2304.878850] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2304.879021] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2304.879189] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2304.879346] env[62875]: INFO nova.compute.manager [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Took 7.34 seconds to destroy the instance on the hypervisor. 
[ 2304.879549] env[62875]: DEBUG oslo.service.loopingcall [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2304.880086] env[62875]: DEBUG nova.compute.manager [-] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2304.880190] env[62875]: DEBUG nova.network.neutron [-] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2304.881712] env[62875]: DEBUG nova.compute.manager [-] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2304.881809] env[62875]: DEBUG nova.network.neutron [-] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2304.888019] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dc6d8f-2f94-463a-bf93-8f06cbd5d59c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.899838] env[62875]: DEBUG nova.compute.provider_tree [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2305.053655] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180587, 'name': CloneVM_Task, 'duration_secs': 3.844737} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2305.053937] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Created linked-clone VM from snapshot [ 2305.054694] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346d65e5-a5ba-44ab-b569-39ef06b2eab4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.061907] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Uploading image e08639e6-d268-4e23-985d-a17e45dbb64a {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2305.092867] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2305.092867] env[62875]: value = "vm-444998" [ 2305.092867] env[62875]: _type = "VirtualMachine" [ 2305.092867] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2305.093166] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9be09b9a-fb7f-43b3-924c-3264a2e7e67f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.100536] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lease: (returnval){ [ 2305.100536] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf7dd-6028-5561-cc66-7edca6fffef9" [ 2305.100536] env[62875]: _type = "HttpNfcLease" [ 2305.100536] env[62875]: } obtained for exporting VM: (result){ [ 2305.100536] env[62875]: value = "vm-444998" [ 2305.100536] env[62875]: _type = "VirtualMachine" [ 2305.100536] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2305.100993] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the lease: (returnval){ [ 2305.100993] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf7dd-6028-5561-cc66-7edca6fffef9" [ 2305.100993] env[62875]: _type = "HttpNfcLease" [ 2305.100993] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2305.106720] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2305.106720] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf7dd-6028-5561-cc66-7edca6fffef9" [ 2305.106720] env[62875]: _type = "HttpNfcLease" [ 2305.106720] env[62875]: } is initializing. 
{{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2305.168425] env[62875]: DEBUG nova.compute.manager [req-31ef014a-bf7c-4009-a3d3-ece9e19adfdc req-f44cb99b-3fff-43a5-84fb-9fdf9e3d55e8 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Received event network-vif-deleted-84ebcc4e-2df0-4dbb-81b5-17618136bfa5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2305.168626] env[62875]: INFO nova.compute.manager [req-31ef014a-bf7c-4009-a3d3-ece9e19adfdc req-f44cb99b-3fff-43a5-84fb-9fdf9e3d55e8 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Neutron deleted interface 84ebcc4e-2df0-4dbb-81b5-17618136bfa5; detaching it from the instance and deleting it from the info cache [ 2305.168795] env[62875]: DEBUG nova.network.neutron [req-31ef014a-bf7c-4009-a3d3-ece9e19adfdc req-f44cb99b-3fff-43a5-84fb-9fdf9e3d55e8 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2305.299197] env[62875]: DEBUG nova.compute.manager [req-d7575d13-5f6f-4919-a97a-e1ae998fe63b req-44488c28-fd53-4dd6-9183-d45f00d750bf service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Received event network-vif-deleted-56a3ac28-628c-4b11-a501-4f0c95923e8a {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2305.299352] env[62875]: INFO nova.compute.manager [req-d7575d13-5f6f-4919-a97a-e1ae998fe63b req-44488c28-fd53-4dd6-9183-d45f00d750bf service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Neutron deleted interface 56a3ac28-628c-4b11-a501-4f0c95923e8a; detaching it from the instance and deleting it from the info cache [ 2305.299467] env[62875]: DEBUG nova.network.neutron [req-d7575d13-5f6f-4919-a97a-e1ae998fe63b req-44488c28-fd53-4dd6-9183-d45f00d750bf service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2305.402712] env[62875]: DEBUG nova.scheduler.client.report [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2305.457575] env[62875]: INFO nova.compute.manager [None req-1c8570e0-4b26-457f-b173-a3c0d30f0593 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Get console output [ 2305.457952] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-1c8570e0-4b26-457f-b173-a3c0d30f0593 tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] The console log is missing. 
[ 2305.608874] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 2305.608874] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf7dd-6028-5561-cc66-7edca6fffef9"
[ 2305.608874] env[62875]: _type = "HttpNfcLease"
[ 2305.608874] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}}
[ 2305.609202] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Invoking VIM API for reading info of lease: (returnval){
[ 2305.609202] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf7dd-6028-5561-cc66-7edca6fffef9"
[ 2305.609202] env[62875]: _type = "HttpNfcLease"
[ 2305.609202] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}}
[ 2305.609908] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b73f60a-cbed-4d14-bee3-396f9d67a878 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2305.617023] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdcbe8-16e1-e14a-7369-3c6bfc4cac37/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}}
[ 2305.617217] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdcbe8-16e1-e14a-7369-3c6bfc4cac37/disk-0.vmdk for reading. {{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}}
[ 2305.672474] env[62875]: DEBUG nova.network.neutron [-] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2305.674159] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7462de99-3e5e-4889-8fe1-c9f0b51edc84 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2305.684628] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff62cafb-1a72-42dd-92e8-1807e4286af6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2305.704122] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bbacd714-2e25-4941-9969-575e23a6770b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2305.713781] env[62875]: DEBUG nova.compute.manager [req-31ef014a-bf7c-4009-a3d3-ece9e19adfdc req-f44cb99b-3fff-43a5-84fb-9fdf9e3d55e8 service nova] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Detach interface failed, port_id=84ebcc4e-2df0-4dbb-81b5-17618136bfa5, reason: Instance 4356506b-ff11-43bb-84be-35ea0fe90cb1 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2305.779702] env[62875]: DEBUG nova.network.neutron [-] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2305.797808] env[62875]: DEBUG nova.network.neutron [-] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2305.803940] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecf413e3-e967-4aac-b262-d65fc49166f1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2305.814306] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1006597e-1421-40ed-bca0-7b4fce41c8fd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2305.843344] env[62875]: DEBUG nova.compute.manager [req-d7575d13-5f6f-4919-a97a-e1ae998fe63b req-44488c28-fd53-4dd6-9183-d45f00d750bf service nova] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Detach interface failed, port_id=56a3ac28-628c-4b11-a501-4f0c95923e8a, reason: Instance 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 2305.907763] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2305.908423] env[62875]: DEBUG nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 2306.174866] env[62875]: INFO nova.compute.manager [-] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Took 1.29 seconds to deallocate network for instance.
[ 2306.282155] env[62875]: INFO nova.compute.manager [-] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Took 1.44 seconds to deallocate network for instance.
[ 2306.300436] env[62875]: INFO nova.compute.manager [-] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Took 1.42 seconds to deallocate network for instance.
[ 2306.412967] env[62875]: DEBUG nova.compute.utils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2306.414431] env[62875]: DEBUG nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2306.415126] env[62875]: DEBUG nova.network.neutron [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 2306.455422] env[62875]: DEBUG nova.policy [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8ada1169581405593c044334da15c8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4587f22755546f3980cc2900bb6bc7c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}}
[ 2306.537815] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2306.538099] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2306.538315] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2306.538504] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2306.538745] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2306.541482] env[62875]: INFO nova.compute.manager [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Terminating instance
[ 2306.682726] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2306.683112] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2306.683455] env[62875]: DEBUG nova.objects.instance [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lazy-loading 'resources' on Instance uuid 4356506b-ff11-43bb-84be-35ea0fe90cb1 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2306.726772] env[62875]: DEBUG nova.network.neutron [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Successfully created port: 0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2306.789451] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2306.809483] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2306.918352] env[62875]: DEBUG nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}}
[ 2307.045683] env[62875]: DEBUG nova.compute.manager [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2307.045872] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2307.046867] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ac2ba9-f63a-489f-ab0a-6d6e59083108 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.055137] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 2307.055427] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aaffa45e-5f07-4555-ab60-2934d7fc06e1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.061278] env[62875]: DEBUG oslo_vmware.api [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){
[ 2307.061278] env[62875]: value = "task-2180592"
[ 2307.061278] env[62875]: _type = "Task"
[ 2307.061278] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2307.069628] env[62875]: DEBUG oslo_vmware.api [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2307.323932] env[62875]: DEBUG nova.compute.manager [req-4af9cdbb-8639-49d8-9f86-14ad3519a8b1 req-543ac085-21d6-402a-8587-d36f94dbcf4a service nova] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Received event network-vif-deleted-ac461c5e-0fb6-4e69-adcc-14af13eb6dca {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2307.340331] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ef0afe-752b-4efd-8b0a-1f16c25baa67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.348449] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4492303-508f-47e4-9965-eb3893c94308 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.381593] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90a8b6c-4d63-406e-a3e2-66cd0fc35d06 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.389138] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df407fe-93d5-4458-a0b8-b8da79d397a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.402468] env[62875]: DEBUG nova.compute.provider_tree [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2307.573038] env[62875]: DEBUG oslo_vmware.api [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180592, 'name': PowerOffVM_Task, 'duration_secs': 0.184959} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2307.573438] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 2307.573574] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 2307.573883] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06d988e5-fe1b-4ab3-b2c4-3ecc5d93bec9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.673316] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 2307.674877] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 2307.674877] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Deleting the datastore file [datastore2] 70547fbd-7ce8-466e-8abc-b490b8dd6b28 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2307.674877] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52f34abc-2434-47ce-b558-ded6d0a06253 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.681336] env[62875]: DEBUG oslo_vmware.api [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for the task: (returnval){
[ 2307.681336] env[62875]: value = "task-2180594"
[ 2307.681336] env[62875]: _type = "Task"
[ 2307.681336] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2307.689388] env[62875]: DEBUG oslo_vmware.api [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2307.906356] env[62875]: DEBUG nova.scheduler.client.report [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2307.927686] env[62875]: DEBUG nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}}
[ 2307.949471] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}}
[ 2307.949720] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2307.949878] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}}
[ 2307.950071] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2307.950223] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}}
[ 2307.950372] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 2307.950574] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}}
[ 2307.950732] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2307.950900] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}}
[ 2307.951079] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}}
[ 2307.951261] env[62875]: DEBUG nova.virt.hardware [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}}
[ 2307.952327] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f63a791-9381-456b-a3c2-299aa4c7cf56 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2307.960614] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44813dc8-1b5b-43b2-ad20-f1a06b2928f2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2308.192493] env[62875]: DEBUG oslo_vmware.api [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Task: {'id': task-2180594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145786} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2308.192786] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2308.193014] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 2308.193280] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2308.193408] env[62875]: INFO nova.compute.manager [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Took 1.15 seconds to destroy the instance on the hypervisor.
[ 2308.193668] env[62875]: DEBUG oslo.service.loopingcall [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2308.193872] env[62875]: DEBUG nova.compute.manager [-] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2308.194032] env[62875]: DEBUG nova.network.neutron [-] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2308.247725] env[62875]: DEBUG nova.network.neutron [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Successfully updated port: 0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2308.412403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2308.415663] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.626s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2308.416103] env[62875]: DEBUG nova.objects.instance [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lazy-loading 'resources' on Instance uuid 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2308.443093] env[62875]: INFO nova.scheduler.client.report [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted allocations for instance 4356506b-ff11-43bb-84be-35ea0fe90cb1
[ 2308.750850] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "refresh_cache-9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2308.751205] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquired lock "refresh_cache-9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2308.751205] env[62875]: DEBUG nova.network.neutron [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2308.951062] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e47ce066-6538-4f19-940b-a4a034cc1089 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "4356506b-ff11-43bb-84be-35ea0fe90cb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.633s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2309.063148] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b03278b-cc7b-4ede-b8bd-4ded5a0bfc66 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.071025] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878f3da3-a5f9-4eda-a0d4-8eef281529e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.101733] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4359b4c-fb82-4959-bc31-0efbed7068f4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.109667] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58734adc-cf15-4bd3-a07c-5d6b024a2063 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.124424] env[62875]: DEBUG nova.compute.provider_tree [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2309.284261] env[62875]: DEBUG nova.network.neutron [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 2309.303901] env[62875]: DEBUG nova.network.neutron [-] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2309.358059] env[62875]: DEBUG nova.compute.manager [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Received event network-vif-plugged-0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2309.358296] env[62875]: DEBUG oslo_concurrency.lockutils [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] Acquiring lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2309.358624] env[62875]: DEBUG oslo_concurrency.lockutils [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2309.358744] env[62875]: DEBUG oslo_concurrency.lockutils [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2309.358845] env[62875]: DEBUG nova.compute.manager [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] No waiting events found dispatching network-vif-plugged-0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2309.358980] env[62875]: WARNING nova.compute.manager [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Received unexpected event network-vif-plugged-0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 for instance with vm_state building and task_state spawning.
[ 2309.359275] env[62875]: DEBUG nova.compute.manager [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Received event network-changed-0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2309.359645] env[62875]: DEBUG nova.compute.manager [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Refreshing instance network info cache due to event network-changed-0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}}
[ 2309.359645] env[62875]: DEBUG oslo_concurrency.lockutils [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] Acquiring lock "refresh_cache-9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2309.452283] env[62875]: DEBUG nova.network.neutron [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Updating instance_info_cache with network_info: [{"id": "0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86", "address": "fa:16:3e:50:bc:b1", "network": {"id": "cec03f05-a46f-4a00-ab1b-25c254fc7c96", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-880939329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4587f22755546f3980cc2900bb6bc7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4f8284-ea", "ovs_interfaceid": "0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2309.628233] env[62875]: DEBUG nova.scheduler.client.report [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2309.806385] env[62875]: INFO nova.compute.manager [-] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Took 1.61 seconds to deallocate network for instance.
[ 2309.956057] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Releasing lock "refresh_cache-9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2309.956438] env[62875]: DEBUG nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Instance network_info: |[{"id": "0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86", "address": "fa:16:3e:50:bc:b1", "network": {"id": "cec03f05-a46f-4a00-ab1b-25c254fc7c96", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-880939329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4587f22755546f3980cc2900bb6bc7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4f8284-ea", "ovs_interfaceid": "0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}}
[ 2309.956738] env[62875]: DEBUG oslo_concurrency.lockutils [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] Acquired lock "refresh_cache-9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2309.956923] env[62875]: DEBUG nova.network.neutron [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Refreshing network info cache for port 0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 2309.958146] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:bc:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2309.966590] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Creating folder: Project (e4587f22755546f3980cc2900bb6bc7c). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2309.969899] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-298ac5db-abf7-4e22-8e01-e0ee9bd3312d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.981565] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Created folder: Project (e4587f22755546f3980cc2900bb6bc7c) in parent group-v444854.
[ 2309.981765] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Creating folder: Instances. Parent ref: group-v444999. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 2309.982035] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7f96f78-5fc7-415c-baa3-42bda44024dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.990657] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Created folder: Instances in parent group-v444999.
[ 2309.990894] env[62875]: DEBUG oslo.service.loopingcall [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2309.991103] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 2309.991317] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30b3c26b-73bd-43a6-a478-872ba8e8043d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2310.014718] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2310.014718] env[62875]: value = "task-2180597"
[ 2310.014718] env[62875]: _type = "Task"
[ 2310.014718] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2310.022653] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180597, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2310.134858] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.719s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2310.137135] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.328s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2310.137343] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2310.156270] env[62875]: INFO nova.scheduler.client.report [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Deleted allocations for instance f3297565-541b-4a8f-a753-419b6e953ff0
[ 2310.158077] env[62875]: INFO nova.scheduler.client.report [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted allocations for instance 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3
[ 2310.206145] env[62875]: DEBUG nova.network.neutron [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Updated VIF entry in instance network info cache for port 0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 2310.206545] env[62875]: DEBUG nova.network.neutron [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Updating instance_info_cache with network_info: [{"id": "0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86", "address": "fa:16:3e:50:bc:b1", "network": {"id": "cec03f05-a46f-4a00-ab1b-25c254fc7c96", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-880939329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4587f22755546f3980cc2900bb6bc7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4f8284-ea", "ovs_interfaceid": "0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2310.313753] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2310.314111] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2310.314400] env[62875]: DEBUG nova.objects.instance [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lazy-loading 'resources' on Instance uuid 70547fbd-7ce8-466e-8abc-b490b8dd6b28 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2310.525465] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180597, 'name': CreateVM_Task, 'duration_secs': 0.443386} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2310.525661] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 2310.526368] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2310.526532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2310.526857] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2310.527153] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0344e71-6c82-4727-9073-2422b6531593 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2310.532119] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){
[ 2310.532119] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525f949f-75d4-fda8-5516-399ab7701a59"
[ 2310.532119] env[62875]: _type = "Task"
[ 2310.532119] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2310.540505] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525f949f-75d4-fda8-5516-399ab7701a59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.676042] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b76a71e0-d857-4ee2-9da2-7943987ce355 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.361s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.679746] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4f4541f4-c23c-4ec8-a9d3-c3b4631a7d02 tempest-DeleteServersTestJSON-1338008548 tempest-DeleteServersTestJSON-1338008548-project-member] Lock "f3297565-541b-4a8f-a753-419b6e953ff0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.643s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.710479] env[62875]: DEBUG oslo_concurrency.lockutils [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] Releasing lock "refresh_cache-9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2310.710776] env[62875]: DEBUG nova.compute.manager [req-8d35a796-286c-4d46-bd34-8de770e987a6 req-7497c3dc-8cde-49b9-898a-4d4281cb742e service nova] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Received event network-vif-deleted-4d852812-d3cb-456e-992b-fe1bb2ebc36e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2310.931788] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b90e8b5-c83d-4e1e-bc2e-015fca926b03 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.939928] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fdf2d7-a042-4475-b9b8-d23a710ea06e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.972461] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20791e1-5420-4482-8c31-e80f5e5c180c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.980757] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec23cba2-da56-4238-8884-ddaf38b5b0a9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.994754] env[62875]: DEBUG nova.compute.provider_tree [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2311.044497] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525f949f-75d4-fda8-5516-399ab7701a59, 'name': SearchDatastore_Task, 
'duration_secs': 0.009338} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.044830] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2311.045084] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2311.045359] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2311.045518] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2311.045705] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.045974] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b18f8ed-4566-4032-8f06-a80b1a28d495 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.054576] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.055009] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2311.055518] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a638d137-c9a4-4c1a-bc66-341b43aeca41 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.060913] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2311.060913] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a7586d-4cae-d3fe-098f-1c3f54d4e59f" [ 2311.060913] env[62875]: _type = "Task" [ 2311.060913] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.068618] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a7586d-4cae-d3fe-098f-1c3f54d4e59f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.489082] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "837d85b2-1c61-4286-bd36-49ad273107df" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.489326] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "837d85b2-1c61-4286-bd36-49ad273107df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.497487] env[62875]: DEBUG nova.scheduler.client.report [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2311.519603] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.519834] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.571430] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a7586d-4cae-d3fe-098f-1c3f54d4e59f, 'name': SearchDatastore_Task, 'duration_secs': 0.011368} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.572219] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3bb5662-6f66-4820-a055-a05a3bc2d547 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.577839] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2311.577839] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e975e7-aef5-6927-7aad-59f646179215" [ 2311.577839] env[62875]: _type = "Task" [ 2311.577839] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.585526] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e975e7-aef5-6927-7aad-59f646179215, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.991780] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2312.001938] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.688s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.021678] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2312.025932] env[62875]: INFO nova.scheduler.client.report [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Deleted allocations for instance 70547fbd-7ce8-466e-8abc-b490b8dd6b28 [ 2312.088611] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e975e7-aef5-6927-7aad-59f646179215, 'name': SearchDatastore_Task, 'duration_secs': 0.010119} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.088873] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2312.089180] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d/9e6c5891-a5fe-49f0-94af-6ac6d3704d7d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2312.089442] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-423e66ff-eab1-4539-89a0-1ebcc7991f72 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.096611] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2312.096611] env[62875]: value = "task-2180599" [ 2312.096611] env[62875]: _type = "Task" [ 2312.096611] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.104476] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180599, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.515058] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.515364] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.517340] env[62875]: INFO nova.compute.claims [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2312.533171] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3e74f598-32e6-47b5-a4d2-c8f6f8051ead tempest-ServersV294TestFqdnHostnames-323791811 tempest-ServersV294TestFqdnHostnames-323791811-project-member] Lock "70547fbd-7ce8-466e-8abc-b490b8dd6b28" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.995s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.550665] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.607374] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180599, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490575} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.607629] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d/9e6c5891-a5fe-49f0-94af-6ac6d3704d7d.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2312.607852] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2312.608124] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3986c1c2-5629-4ef3-ba38-3b6468dfee75 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.614195] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2312.614195] env[62875]: value = "task-2180600" [ 2312.614195] env[62875]: _type = "Task" [ 2312.614195] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.622474] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180600, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.124037] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180600, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110266} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.124333] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2313.125023] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a3d402-cdf5-43e7-becb-49133c6d109f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.149659] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d/9e6c5891-a5fe-49f0-94af-6ac6d3704d7d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2313.150033] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d48b2480-9369-46c3-97ae-e9eb871d2661 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.170041] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2313.170041] env[62875]: value = "task-2180601" [ 2313.170041] env[62875]: _type = "Task" [ 2313.170041] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.178990] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180601, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.429783] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdcbe8-16e1-e14a-7369-3c6bfc4cac37/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2313.430921] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0bfdbf-87b5-473e-b173-f0472ccf7040 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.447443] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdcbe8-16e1-e14a-7369-3c6bfc4cac37/disk-0.vmdk is in state: ready. 
{{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2313.447627] env[62875]: ERROR oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdcbe8-16e1-e14a-7369-3c6bfc4cac37/disk-0.vmdk due to incomplete transfer. [ 2313.447928] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a6832151-0968-47db-a09e-a22143466a94 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.457560] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdcbe8-16e1-e14a-7369-3c6bfc4cac37/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2313.457761] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Uploaded image e08639e6-d268-4e23-985d-a17e45dbb64a to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2313.463363] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2313.463654] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4e2ac352-e7ea-4588-99c8-aac1f1616630 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.469776] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2313.469776] env[62875]: value = "task-2180602" [ 2313.469776] env[62875]: _type = "Task" [ 2313.469776] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.482650] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180602, 'name': Destroy_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.668894] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88955141-071b-4629-b907-4d29a4259aac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.688689] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1ce4d0-ada0-4a67-b283-627b3e965549 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.693905] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180601, 'name': ReconfigVM_Task, 'duration_secs': 0.304727} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.694335] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d/9e6c5891-a5fe-49f0-94af-6ac6d3704d7d.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2313.695578] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91b15d37-f905-4c6d-b1d9-6758053c27d1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.743237] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb549bf-f671-4fdc-9a7b-6a2d0acbfd79 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.749487] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2313.749487] env[62875]: value = "task-2180603" [ 2313.749487] env[62875]: _type = "Task" [ 2313.749487] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.761669] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3f46a8-f8a8-460f-a451-b54c9ff8ef27 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.776989] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180603, 'name': Rename_Task} progress is 10%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.793158] env[62875]: DEBUG nova.compute.provider_tree [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2313.982297] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180602, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.260376] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180603, 'name': Rename_Task, 'duration_secs': 0.148993} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.260847] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2314.261381] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf6565dc-f5d9-4a94-a93c-69b0b7dfcbe8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.267031] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2314.267031] env[62875]: value = "task-2180604" [ 2314.267031] env[62875]: _type = "Task" [ 2314.267031] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.274460] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180604, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.296773] env[62875]: DEBUG nova.scheduler.client.report [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2314.482229] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180602, 'name': Destroy_Task, 'duration_secs': 0.578656} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.483862] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Destroyed the VM [ 2314.483862] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2314.483862] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-04e6aba7-5fc2-4de6-acec-7e9daed8aed0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.489132] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2314.489132] env[62875]: value = "task-2180605" [ 2314.489132] env[62875]: _type = "Task" [ 2314.489132] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.496654] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180605, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.777892] env[62875]: DEBUG oslo_vmware.api [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180604, 'name': PowerOnVM_Task, 'duration_secs': 0.4462} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.779906] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2314.779906] env[62875]: INFO nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Took 6.85 seconds to spawn the instance on the hypervisor. [ 2314.779906] env[62875]: DEBUG nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2314.779906] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c18245-22d9-492d-bbef-ca460905eb16 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.802516] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2314.803284] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2314.806387] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.256s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2314.807832] env[62875]: INFO nova.compute.claims [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2314.999714] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180605, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.302768] env[62875]: INFO nova.compute.manager [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Took 11.66 seconds to build instance. [ 2315.312926] env[62875]: DEBUG nova.compute.utils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2315.315581] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2315.315755] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2315.501954] env[62875]: DEBUG oslo_vmware.api [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180605, 'name': RemoveSnapshot_Task, 'duration_secs': 0.547873} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2315.501954] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2315.501954] env[62875]: INFO nova.compute.manager [None req-9a39df10-477f-4c38-b01c-cf2cb67ce019 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Took 16.05 seconds to snapshot the instance on the hypervisor. 
[ 2315.602051] env[62875]: DEBUG nova.policy [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec72ce9630d1427a93aeed584e2c989e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4f66deaad8e44f8b352e02a74ca6613', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2315.805642] env[62875]: DEBUG oslo_concurrency.lockutils [None req-b1fdafec-1617-4847-bb77-efbb07ebd243 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.171s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.816879] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2315.979558] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ede1844-255f-45a6-a312-6547a9da75d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.989896] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec35a3db-e235-436f-9954-8cf19b19d83e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.030212] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa794d1-d512-4b90-a31a-069a0405c764 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.039030] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8230fed-0da1-482a-b7d2-222d2f51d1e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.043936] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Successfully created port: f6a2852f-1e58-4373-a694-0bdb0031f649 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2316.056501] env[62875]: DEBUG nova.compute.provider_tree [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2316.559621] env[62875]: DEBUG nova.scheduler.client.report 
[None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2316.636899] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2316.637241] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2316.637514] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2316.637640] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2316.637807] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2316.639985] env[62875]: INFO nova.compute.manager [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Terminating instance [ 2316.831294] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 
tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2316.853482] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=<?>,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-25T04:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2316.853670] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2316.853774] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2316.853965] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2316.854151] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2316.854307] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2316.854509] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2316.854665] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2316.854887] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2316.854977] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2316.855160] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2316.856112] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b0fa82-0936-4876-bb07-7c515fa2e8ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.863998] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842e11df-1a12-4ff0-9299-51e44646242b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.065214] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.259s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.065832] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2317.145923] env[62875]: DEBUG nova.compute.manager [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2317.146168] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2317.147080] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca81c025-4d1c-4051-b0ab-9b0b2d149a25 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.154857] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2317.155124] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fe15cfb-aa2c-4256-8f1d-150c07cd0572 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.162147] env[62875]: DEBUG oslo_vmware.api [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2317.162147] env[62875]: value = "task-2180606" [ 2317.162147] env[62875]: _type = "Task" [ 2317.162147] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.170211] env[62875]: DEBUG oslo_vmware.api [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180606, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.571086] env[62875]: DEBUG nova.compute.utils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2317.572589] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2317.572764] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2317.657775] env[62875]: DEBUG nova.policy [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec72ce9630d1427a93aeed584e2c989e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4f66deaad8e44f8b352e02a74ca6613', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2317.671009] env[62875]: DEBUG oslo_vmware.api [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180606, 'name': PowerOffVM_Task, 'duration_secs': 0.213347} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.671415] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2317.671523] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2317.671732] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-190393f4-fa4d-48f6-a811-7c27ab9a2e66 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.687874] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.688129] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
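The task-2180606 exchange above is oslo.vmware's standard invoke-then-poll pattern: the driver submits PowerOffVM_Task to vCenter, wait_for_task() blocks on it, and each server-side poll emits one "_poll_task ... progress is N%" line until the task reports "completed successfully". A minimal sketch of that pattern follows; the host, credentials, and vm_ref are placeholders (a real vm_ref is a VirtualMachine managed-object reference obtained from a PropertyCollector query like the RetrievePropertiesEx calls above), not values taken from this log.

    from oslo_vmware import api


    def power_off(session, vm_ref):
        # Submitting the task produces the "Invoking
        # VirtualMachine.PowerOffVM_Task" service line in the log.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls until vCenter reports success (one
        # "_poll_task ... progress is N%" line per poll) and returns
        # the task info, or raises if the task errors out.
        return session.wait_for_task(task)


    # Placeholder endpoint and credentials; Nova reads the real ones
    # from the [vmware] section of nova.conf, not from this log.
    session = api.VMwareAPISession(
        'vc.example.test', 'svc-nova', 'secret',
        api_retry_count=10, task_poll_interval=0.5)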
[ 2317.688335] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.688515] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.688683] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.690994] env[62875]: INFO nova.compute.manager [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Terminating instance [ 2317.743991] env[62875]: DEBUG nova.compute.manager [req-f07a2024-ad23-467a-9ee0-b4e581725e06 req-9ca00f13-aab3-423f-ab81-f86edda9b1df service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Received event network-vif-plugged-f6a2852f-1e58-4373-a694-0bdb0031f649 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2317.744409] env[62875]: DEBUG oslo_concurrency.lockutils [req-f07a2024-ad23-467a-9ee0-b4e581725e06 req-9ca00f13-aab3-423f-ab81-f86edda9b1df service nova] Acquiring lock "837d85b2-1c61-4286-bd36-49ad273107df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.744727] env[62875]: DEBUG oslo_concurrency.lockutils [req-f07a2024-ad23-467a-9ee0-b4e581725e06 req-9ca00f13-aab3-423f-ab81-f86edda9b1df service nova] Lock "837d85b2-1c61-4286-bd36-49ad273107df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.745015] env[62875]: DEBUG oslo_concurrency.lockutils [req-f07a2024-ad23-467a-9ee0-b4e581725e06 req-9ca00f13-aab3-423f-ab81-f86edda9b1df service nova] Lock "837d85b2-1c61-4286-bd36-49ad273107df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.745218] env[62875]: DEBUG nova.compute.manager [req-f07a2024-ad23-467a-9ee0-b4e581725e06 req-9ca00f13-aab3-423f-ab81-f86edda9b1df service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] No waiting events found dispatching 
network-vif-plugged-f6a2852f-1e58-4373-a694-0bdb0031f649 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2317.745393] env[62875]: WARNING nova.compute.manager [req-f07a2024-ad23-467a-9ee0-b4e581725e06 req-9ca00f13-aab3-423f-ab81-f86edda9b1df service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Received unexpected event network-vif-plugged-f6a2852f-1e58-4373-a694-0bdb0031f649 for instance with vm_state building and task_state spawning. [ 2317.920813] env[62875]: INFO nova.compute.manager [None req-213473ee-2870-4472-b31c-2a9ca93793bf tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Get console output [ 2317.921089] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-213473ee-2870-4472-b31c-2a9ca93793bf tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] The console log is missing. Check your VSPC configuration [ 2318.030754] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Successfully updated port: f6a2852f-1e58-4373-a694-0bdb0031f649 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2318.076558] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2318.196727] env[62875]: DEBUG nova.compute.manager [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2318.196727] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2318.196727] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414c71db-df86-4f32-9bc8-d7fdb94b394e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.204189] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2318.204431] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94411934-85ab-45af-ad38-abfe632dd0bb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.209658] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Successfully created port: 14853567-7b7f-4ef6-a732-a0a4e02ccf8d {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2318.213167] env[62875]: DEBUG oslo_vmware.api [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2318.213167] env[62875]: value = "task-2180608" [ 2318.213167] env[62875]: _type = "Task" [ 2318.213167] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.224695] env[62875]: DEBUG oslo_vmware.api [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180608, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.536700] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "refresh_cache-837d85b2-1c61-4286-bd36-49ad273107df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2318.536864] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "refresh_cache-837d85b2-1c61-4286-bd36-49ad273107df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2318.538173] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2318.724423] env[62875]: DEBUG oslo_vmware.api [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180608, 'name': PowerOffVM_Task, 'duration_secs': 0.181291} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2318.724767] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2318.724812] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2318.725043] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5df48e58-a8a1-4458-a41a-cb4be45291b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.068581] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "9e0aaea6-96cf-494d-9f70-a709a47f9772" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.068987] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2319.069410] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "9e0aaea6-96cf-494d-9f70-a709a47f9772-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.069785] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.070061] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2319.073251] env[62875]: INFO nova.compute.manager [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Terminating instance [ 2319.075554] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2319.089522] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Start spawning the instance on the hypervisor.
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}}
[ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2319.122991] env[62875]: DEBUG nova.virt.hardware [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2319.123816] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0288e2f1-d5e4-4124-917c-997bfdfc6972 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.131988] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b06cd2d-aeed-43a0-b4e3-5cd83f73d9e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.224281] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Updating instance_info_cache with network_info: [{"id": "f6a2852f-1e58-4373-a694-0bdb0031f649", "address": "fa:16:3e:6d:b8:36", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6a2852f-1e", "ovs_interfaceid": "f6a2852f-1e58-4373-a694-0bdb0031f649", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2319.580315] env[62875]: DEBUG nova.compute.manager [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Start destroying the instance on the hypervisor.
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2319.580566] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2319.581524] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12afb0f-76f4-4db5-9c7d-0a748c8b5c98 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.589147] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2319.589391] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9819668-4ebf-484b-b1e8-c40ea849eded {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.595453] env[62875]: DEBUG oslo_vmware.api [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2319.595453] env[62875]: value = "task-2180610" [ 2319.595453] env[62875]: _type = "Task" [ 2319.595453] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2319.604145] env[62875]: DEBUG oslo_vmware.api [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180610, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2319.727403] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "refresh_cache-837d85b2-1c61-4286-bd36-49ad273107df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2319.727940] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Instance network_info: |[{"id": "f6a2852f-1e58-4373-a694-0bdb0031f649", "address": "fa:16:3e:6d:b8:36", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6a2852f-1e", "ovs_interfaceid": "f6a2852f-1e58-4373-a694-0bdb0031f649", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2319.728975] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:b8:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6a2852f-1e58-4373-a694-0bdb0031f649', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2319.745375] env[62875]: DEBUG oslo.service.loopingcall [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2319.745703] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2319.745951] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98f15c4f-347c-496b-994d-220e80b8ec76 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.771338] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2319.771338] env[62875]: value = "task-2180614" [ 2319.771338] env[62875]: _type = "Task" [ 2319.771338] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2319.780044] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2319.784565] env[62875]: DEBUG nova.compute.manager [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Received event network-changed-f6a2852f-1e58-4373-a694-0bdb0031f649 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2319.784836] env[62875]: DEBUG nova.compute.manager [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Refreshing instance network info cache due to event network-changed-f6a2852f-1e58-4373-a694-0bdb0031f649. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2319.784836] env[62875]: DEBUG oslo_concurrency.lockutils [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] Acquiring lock "refresh_cache-837d85b2-1c61-4286-bd36-49ad273107df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2319.784978] env[62875]: DEBUG oslo_concurrency.lockutils [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] Acquired lock "refresh_cache-837d85b2-1c61-4286-bd36-49ad273107df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2319.785120] env[62875]: DEBUG nova.network.neutron [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Refreshing network info cache for port f6a2852f-1e58-4373-a694-0bdb0031f649 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2320.106235] env[62875]: DEBUG oslo_vmware.api [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180610, 'name': PowerOffVM_Task, 'duration_secs': 0.21578} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2320.106235] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2320.106414] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2320.106617] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92d40d52-23cc-45ec-a5b9-bfbcf2792f67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.281166] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2320.490519] env[62875]: DEBUG nova.network.neutron [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Updated VIF entry in instance network info cache for port f6a2852f-1e58-4373-a694-0bdb0031f649. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2320.490894] env[62875]: DEBUG nova.network.neutron [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Updating instance_info_cache with network_info: [{"id": "f6a2852f-1e58-4373-a694-0bdb0031f649", "address": "fa:16:3e:6d:b8:36", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6a2852f-1e", "ovs_interfaceid": "f6a2852f-1e58-4373-a694-0bdb0031f649", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2320.782371] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2320.994300] env[62875]: DEBUG oslo_concurrency.lockutils [req-bda31107-fbff-4657-825b-6b815e3224eb req-6e954f9d-c87d-4b28-adfc-efad70b27fca service nova] Releasing lock "refresh_cache-837d85b2-1c61-4286-bd36-49ad273107df" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2321.283355] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2321.783485] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.284784] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.785737] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.907186] env[62875]: INFO nova.compute.manager [None req-9b63346b-afe8-430b-a2cc-7373e8e2eed5 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Get console output [ 2322.907468] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-9b63346b-afe8-430b-a2cc-7373e8e2eed5 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] The console log is missing. Check your VSPC configuration [ 2323.285349] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2323.786235] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2323.976885] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "45403db3-ff20-42d3-8a37-8db671d8c1fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2323.976885] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2323.977156] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "45403db3-ff20-42d3-8a37-8db671d8c1fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2323.977269] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2323.977449] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2323.979817] env[62875]: INFO nova.compute.manager [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Terminating instance [ 2324.286687] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.483902] env[62875]: DEBUG nova.compute.manager [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2324.484211] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2324.485100] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5fbcfa-2cb3-4766-9fa2-2dec4e0c8957 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.492624] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2324.492843] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d610e7ea-bcf1-4e59-8ccf-9cf912e21c5a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.499806] env[62875]: DEBUG oslo_vmware.api [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2324.499806] env[62875]: value = "task-2180617" [ 2324.499806] env[62875]: _type = "Task" [ 2324.499806] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.507353] env[62875]: DEBUG oslo_vmware.api [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180617, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.787259] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.009338] env[62875]: DEBUG oslo_vmware.api [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180617, 'name': PowerOffVM_Task, 'duration_secs': 0.184983} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.009605] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2325.009768] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2325.010017] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e826d011-93a4-4b48-81c6-f97621be3a01 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.295668] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.788256] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.288950] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.542899] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2326.543182] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2326.543380] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Deleting the datastore file [datastore2] d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2326.543661] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f13f86f-1ce4-4a41-978a-e03d457f0a52 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.550872] env[62875]: DEBUG oslo_vmware.api [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2326.550872] env[62875]: value = "task-2180620" [ 2326.550872] env[62875]: _type = "Task" [ 2326.550872] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.554975] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2326.555186] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2326.555371] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Deleting the datastore file [datastore2] 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2326.555917] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1fd2533-8a05-4d1c-9177-ac8a81d2d119 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.560317] env[62875]: DEBUG oslo_vmware.api [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180620, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.564835] env[62875]: DEBUG oslo_vmware.api [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for the task: (returnval){ [ 2326.564835] env[62875]: value = "task-2180621" [ 2326.564835] env[62875]: _type = "Task" [ 2326.564835] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.571898] env[62875]: DEBUG oslo_vmware.api [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180621, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.789815] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.025910] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2327.026149] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2327.026336] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Deleting the datastore file [datastore1] 9e0aaea6-96cf-494d-9f70-a709a47f9772 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2327.026609] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-014b7fa3-132b-4049-a104-34662e3ed99c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.030746] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2327.030940] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2327.031140] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleting the datastore file [datastore2] 45403db3-ff20-42d3-8a37-8db671d8c1fa {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2327.031361] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-269bdd1c-868d-4f69-9a74-d3bd20a45076 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.035495] env[62875]: DEBUG oslo_vmware.api [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for the task: (returnval){ [ 2327.035495] env[62875]: value = "task-2180623" [ 2327.035495] env[62875]: _type = "Task" [ 2327.035495] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.039557] env[62875]: DEBUG oslo_vmware.api [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2327.039557] env[62875]: value = "task-2180624" [ 2327.039557] env[62875]: _type = "Task" [ 2327.039557] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.045289] env[62875]: DEBUG oslo_vmware.api [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180623, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.050388] env[62875]: DEBUG oslo_vmware.api [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180624, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.059775] env[62875]: DEBUG oslo_vmware.api [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127946} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.059994] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2327.060183] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2327.060355] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2327.060525] env[62875]: INFO nova.compute.manager [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Took 8.87 seconds to destroy the instance on the hypervisor. [ 2327.060747] env[62875]: DEBUG oslo.service.loopingcall [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2327.060926] env[62875]: DEBUG nova.compute.manager [-] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2327.061036] env[62875]: DEBUG nova.network.neutron [-] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2327.073621] env[62875]: DEBUG oslo_vmware.api [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Task: {'id': task-2180621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127442} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.073847] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2327.074057] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2327.074230] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2327.074405] env[62875]: INFO nova.compute.manager [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Took 9.93 seconds to destroy the instance on the hypervisor. [ 2327.074628] env[62875]: DEBUG oslo.service.loopingcall [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2327.074811] env[62875]: DEBUG nova.compute.manager [-] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2327.074903] env[62875]: DEBUG nova.network.neutron [-] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2327.291924] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180614, 'name': CreateVM_Task, 'duration_secs': 7.474402} completed successfully.
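The looping-call waits above wrap ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries: network deallocation is retried rather than allowed to fail the whole terminate path on a transient Neutron error. A rough sketch of that retry shape, assuming a hypothetical deallocate callable; the real code runs under an oslo.service looping call and retries only specific exceptions:

    import time

    def deallocate_with_retries(deallocate, attempts=3, delay=1.0):
        """Bounded retry around a network deallocation call.

        Mirrors the shape, not the code, of
        _try_deallocate_network.<locals>._deallocate_network_with_retries.
        """
        for attempt in range(1, attempts + 1):
            try:
                deallocate()          # e.g. neutron deallocate_for_instance()
                return
            except Exception as exc:  # Nova narrows this to specific errors
                if attempt == attempts:
                    raise
                print(f"deallocate failed ({exc}); retry {attempt}/{attempts}")
                time.sleep(delay)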
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.291924] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2327.291924] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.292190] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.292995] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2327.292995] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f90ffd-0968-4990-9e2c-52e13e61805e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.297562] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2327.297562] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f3353f-bc5b-4092-8cd1-15c5a410944c" [ 2327.297562] env[62875]: _type = "Task" [ 2327.297562] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.305705] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f3353f-bc5b-4092-8cd1-15c5a410944c, 'name': SearchDatastore_Task} progress is 0%. 
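The lock plus external semaphore taken above on "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" serialize concurrent spawns against one cached image before the SearchDatastore_Task existence check runs. A minimal sketch of the pattern using oslo.concurrency's real lock context manager; session_search is a hypothetical callable standing in for the datastore search:

    from oslo_concurrency import lockutils

    def check_cached_image(session_search, image_id, datastore="datastore1"):
        """Serialize image-cache checks per (datastore, image id).

        Only one greenthread per host may inspect or populate a given
        cache entry at a time, which is what the Acquiring/Acquired
        records above are reporting.
        """
        lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
        with lockutils.lock(lock_name):
            return session_search(image_id)  # the SearchDatastore_Task call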
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.351115] env[62875]: DEBUG nova.compute.manager [req-1a799274-c94a-41c4-a8fd-c3a53a8a5d21 req-2fb64bd2-17f0-4a89-b99d-ad91eaebf8d2 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Received event network-vif-deleted-edf1649b-cbe3-413d-b8ea-f4c89cd08fc5 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2327.351323] env[62875]: INFO nova.compute.manager [req-1a799274-c94a-41c4-a8fd-c3a53a8a5d21 req-2fb64bd2-17f0-4a89-b99d-ad91eaebf8d2 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Neutron deleted interface edf1649b-cbe3-413d-b8ea-f4c89cd08fc5; detaching it from the instance and deleting it from the info cache [ 2327.351576] env[62875]: DEBUG nova.network.neutron [req-1a799274-c94a-41c4-a8fd-c3a53a8a5d21 req-2fb64bd2-17f0-4a89-b99d-ad91eaebf8d2 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.357560] env[62875]: DEBUG nova.compute.manager [req-5067f757-1508-4f55-ad4d-6dd65cc86e89 req-bec355a2-79df-4660-b83d-3c3411fb1171 service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Received event network-vif-deleted-0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2327.357560] env[62875]: INFO nova.compute.manager [req-5067f757-1508-4f55-ad4d-6dd65cc86e89 req-bec355a2-79df-4660-b83d-3c3411fb1171 service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Neutron deleted interface 0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86; detaching it from the instance and deleting it from the info cache [ 2327.357742] env[62875]: DEBUG nova.network.neutron [req-5067f757-1508-4f55-ad4d-6dd65cc86e89 req-bec355a2-79df-4660-b83d-3c3411fb1171 service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.547223] env[62875]: DEBUG oslo_vmware.api [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Task: {'id': task-2180623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12883} completed successfully. 
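Both "Neutron deleted interface ...; detaching it from the instance and deleting it from the info cache" records above show the external-event path: when Neutron reports a port gone, Nova prunes it from the instance's cached network_info, which is why both caches are then updated to []. A toy version of the pruning step (plain Python, not Nova's code):

    def handle_vif_deleted(info_cache, port_id):
        """Drop a deleted Neutron port from a cached network_info list.

        info_cache: list of vif dicts like [{"id": "...", ...}, ...],
        matching the network_info structures printed in this log.
        """
        remaining = [vif for vif in info_cache if vif["id"] != port_id]
        if len(remaining) == len(info_cache):
            print(f"port {port_id} not in cache; nothing to detach")
        return remaining

    # handle_vif_deleted(cache, "edf1649b-cbe3-413d-b8ea-f4c89cd08fc5")
    # leaves the empty network_info ([]) that the log then persists.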
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.547906] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2327.548176] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2327.548395] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2327.548566] env[62875]: INFO nova.compute.manager [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Took 7.97 seconds to destroy the instance on the hypervisor. [ 2327.548792] env[62875]: DEBUG oslo.service.loopingcall [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2327.549015] env[62875]: DEBUG nova.compute.manager [-] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2327.549165] env[62875]: DEBUG nova.network.neutron [-] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2327.553487] env[62875]: DEBUG oslo_vmware.api [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138093} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.553989] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2327.554218] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2327.554400] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2327.554612] env[62875]: INFO nova.compute.manager [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Took 3.07 seconds to destroy the instance on the hypervisor. [ 2327.554797] env[62875]: DEBUG oslo.service.loopingcall [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2327.554956] env[62875]: DEBUG nova.compute.manager [-] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2327.555074] env[62875]: DEBUG nova.network.neutron [-] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2327.808518] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f3353f-bc5b-4092-8cd1-15c5a410944c, 'name': SearchDatastore_Task, 'duration_secs': 0.00903} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.808827] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2327.809055] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2327.809295] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.809445] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.809907] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2327.810203] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fe99829-ecb1-4cd5-b9d3-888e850a4ada {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.818634] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2327.818817] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Folder [datastore1] devstack-image-cache_base created. 
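"Processing image", "Creating directory ... devstack-image-cache_base" and "Folder ... created" above are the idempotent prepare step of _fetch_image_if_missing: the cache folder is created before the copy, and an already-exists answer from FileManager.MakeDirectory is effectively treated as success. A sketch under that assumption, with ds as a hypothetical datastore client:

    def ensure_cache_dir(ds, path="devstack-image-cache_base"):
        """Create the image-cache folder if it does not already exist.

        ds is a hypothetical client with exists()/mkdir(); on vSphere
        the mkdir maps to the FileManager.MakeDirectory call above.
        """
        if not ds.exists(path):
            ds.mkdir(path)
        return path

Because several spawns can race to this point, the idempotence matters more than the existence check itself: the second creator must not fail.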
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2327.819537] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08c45ed4-da2f-4dfd-aa86-f95e0f3ba97a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.824888] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2327.824888] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd77ed-effa-c82d-b46f-1f5f2b6dc286" [ 2327.824888] env[62875]: _type = "Task" [ 2327.824888] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.832320] env[62875]: DEBUG nova.network.neutron [-] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.833489] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd77ed-effa-c82d-b46f-1f5f2b6dc286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.842640] env[62875]: DEBUG nova.network.neutron [-] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.854358] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f00e9d8-0559-4c79-87c2-f04c5380b193 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.860203] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75ca8cf5-b6cc-453e-8ceb-a3d01208c751 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.866195] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7981e45e-2e25-425f-97ca-ae8fe3201b5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.881870] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42565bd8-cb17-48b6-860c-d760c5933764 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.898527] env[62875]: DEBUG nova.compute.manager [req-1a799274-c94a-41c4-a8fd-c3a53a8a5d21 req-2fb64bd2-17f0-4a89-b99d-ad91eaebf8d2 service nova] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Detach interface failed, port_id=edf1649b-cbe3-413d-b8ea-f4c89cd08fc5, reason: Instance d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2327.914626] env[62875]: DEBUG nova.compute.manager [req-5067f757-1508-4f55-ad4d-6dd65cc86e89 req-bec355a2-79df-4660-b83d-3c3411fb1171 service nova] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Detach interface failed, port_id=0f4f8284-ea6a-4f5e-9bb7-91ebfbc64e86, reason: Instance 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2328.335462] env[62875]: INFO nova.compute.manager [-] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Took 1.27 seconds to deallocate network for instance. [ 2328.335755] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cd77ed-effa-c82d-b46f-1f5f2b6dc286, 'name': SearchDatastore_Task, 'duration_secs': 0.008567} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.340676] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7528aec1-8090-4231-aa4c-cb571d092810 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.344968] env[62875]: INFO nova.compute.manager [-] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Took 1.27 seconds to deallocate network for instance. [ 2328.347586] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2328.347586] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528d499f-549c-6f68-6c05-825532d02dce" [ 2328.347586] env[62875]: _type = "Task" [ 2328.347586] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.359509] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528d499f-549c-6f68-6c05-825532d02dce, 'name': SearchDatastore_Task, 'duration_secs': 0.010288} completed successfully. 
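The two "Detach interface failed ... could not be found" records above are expected noise during teardown: by the time the network-vif-deleted event is processed, the instance has already been destroyed, so the handler degrades to a log line instead of raising. A sketch of that best-effort handling; get_instance and detach are hypothetical helpers:

    def process_vif_deleted(get_instance, detach, instance_uuid, port_id):
        """Best-effort interface detach for an external event.

        The point is the error handling: a missing instance downgrades
        to a log line, mirroring _process_instance_vif_deleted_event.
        """
        instance = get_instance(instance_uuid)
        if instance is None:
            print(f"Detach interface failed, port_id={port_id}, "
                  f"reason: Instance {instance_uuid} could not be found.")
            return
        detach(instance, port_id)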
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.359783] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2328.360046] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 837d85b2-1c61-4286-bd36-49ad273107df/837d85b2-1c61-4286-bd36-49ad273107df.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2328.360290] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33a46044-441a-4b6a-a360-b033f0af04c0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.367224] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2328.367224] env[62875]: value = "task-2180625" [ 2328.367224] env[62875]: _type = "Task" [ 2328.367224] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.377235] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180625, 'name': CopyVirtualDisk_Task} progress is 0%. 
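With the cached VMDK confirmed, the root disk is produced by CopyVirtualDisk_Task from the image-cache path to the instance's own folder, exactly as the two datastore paths in the record above show. A small helper that reconstructs those paths (illustrative only; the driver builds them from datastore objects, not by string formatting):

    def cache_and_instance_paths(image_id, instance_uuid, ds="datastore1"):
        """Source/destination for the CopyVirtualDisk_Task call above."""
        src = f"[{ds}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        dst = f"[{ds}] {instance_uuid}/{instance_uuid}.vmdk"
        return src, dst

    # cache_and_instance_paths("a9637bcc-4de8-4ea1-be59-4c697becf2a7",
    #                          "837d85b2-1c61-4286-bd36-49ad273107df")
    # reproduces the two [datastore1] paths in the log record above.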
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.500963] env[62875]: DEBUG nova.network.neutron [-] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2328.703157] env[62875]: DEBUG nova.network.neutron [-] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2328.845265] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.845577] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.845805] env[62875]: DEBUG nova.objects.instance [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lazy-loading 'resources' on Instance uuid d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2328.862017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.876647] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464892} completed successfully. 
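The "compute_resources" lock above is the resource tracker's single accounting semaphore: every update_usage call from a concurrent delete or spawn serializes on it, which is why later records in this run show multi-second waits (e.g. "waited 2.482s"). The pattern, sketched with oslo.concurrency's real synchronized decorator; this is the shape of the mechanism, not Nova's actual tracker code:

    from oslo_concurrency import lockutils

    # Every accounting entry point takes the same named semaphore, so
    # concurrent updates to host usage serialize behind one another.
    @lockutils.synchronized("compute_resources")
    def update_usage(tracker_state, instance_uuid, delta):
        tracker_state[instance_uuid] = tracker_state.get(instance_uuid, 0) + delta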
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.876920] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 837d85b2-1c61-4286-bd36-49ad273107df/837d85b2-1c61-4286-bd36-49ad273107df.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2328.877287] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2328.877598] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4410ae88-50b2-4437-96fe-0b56b81e031b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.884176] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2328.884176] env[62875]: value = "task-2180627" [ 2328.884176] env[62875]: _type = "Task" [ 2328.884176] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.893655] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.956241] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Successfully updated port: 14853567-7b7f-4ef6-a732-a0a4e02ccf8d {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2329.004028] env[62875]: INFO nova.compute.manager [-] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Took 1.45 seconds to deallocate network for instance. [ 2329.206764] env[62875]: INFO nova.compute.manager [-] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Took 1.65 seconds to deallocate network for instance. 
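"Extending root virtual disk to 1048576" above is a size in KB: the flavor's root disk in GiB is converted before ExtendVirtualDisk_Task is invoked, and 1048576 KB is exactly 1 GiB, consistent with a small tempest flavor. The arithmetic:

    def root_disk_kb(root_gb):
        """vSphere's virtual-disk sizes are in KB: GiB * 1024 * 1024."""
        return root_gb * 1024 * 1024

    assert root_disk_kb(1) == 1048576  # the value in the log record above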
[ 2329.382134] env[62875]: DEBUG nova.compute.manager [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Received event network-vif-deleted-76f0c221-d292-49e7-9f24-d43fa768395b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2329.382395] env[62875]: DEBUG nova.compute.manager [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Received event network-vif-plugged-14853567-7b7f-4ef6-a732-a0a4e02ccf8d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2329.382395] env[62875]: DEBUG oslo_concurrency.lockutils [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] Acquiring lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.382555] env[62875]: DEBUG oslo_concurrency.lockutils [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2329.383216] env[62875]: DEBUG oslo_concurrency.lockutils [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.383216] env[62875]: DEBUG nova.compute.manager [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] No waiting events found dispatching network-vif-plugged-14853567-7b7f-4ef6-a732-a0a4e02ccf8d {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2329.383216] env[62875]: WARNING nova.compute.manager [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Received unexpected event network-vif-plugged-14853567-7b7f-4ef6-a732-a0a4e02ccf8d for instance with vm_state building and task_state spawning. [ 2329.383216] env[62875]: DEBUG nova.compute.manager [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Received event network-changed-14853567-7b7f-4ef6-a732-a0a4e02ccf8d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2329.383603] env[62875]: DEBUG nova.compute.manager [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Refreshing instance network info cache due to event network-changed-14853567-7b7f-4ef6-a732-a0a4e02ccf8d.
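The pop_instance_event sequence above ("No waiting events found dispatching network-vif-plugged..." followed by the WARNING) is the event-latch protocol: a waiter has to register interest in an event before Neutron delivers it; an event with no registered waiter is logged as unexpected and dropped. A toy latch keyed by (instance, event name); Nova uses eventlet and richer bookkeeping, but the shape is the same:

    import threading

    class InstanceEvents:
        """Toy latch, like Nova's prepare/pop_instance_event pair."""

        def __init__(self):
            self._events = {}

        def prepare(self, instance, name):
            ev = threading.Event()
            self._events[(instance, name)] = ev
            return ev  # the spawn path waits on this before plugging VIFs

        def pop(self, instance, name):
            ev = self._events.pop((instance, name), None)
            if ev is None:
                # Nobody was waiting: the log's "Received unexpected
                # event ... for instance with vm_state building".
                print(f"unexpected event {name} for {instance}")
            else:
                ev.set()  # wake the waiter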
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2329.383603] env[62875]: DEBUG oslo_concurrency.lockutils [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] Acquiring lock "refresh_cache-ff4b45e4-da16-4943-b7c0-a8d92781e4c0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.383749] env[62875]: DEBUG oslo_concurrency.lockutils [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] Acquired lock "refresh_cache-ff4b45e4-da16-4943-b7c0-a8d92781e4c0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.383905] env[62875]: DEBUG nova.network.neutron [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Refreshing network info cache for port 14853567-7b7f-4ef6-a732-a0a4e02ccf8d {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2329.389197] env[62875]: DEBUG nova.compute.manager [req-badce4da-3cea-4ab4-8f54-9e6aecb73068 req-e4b72fe2-c2d8-44de-aa83-381abd34594d service nova] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Received event network-vif-deleted-c4a5613b-5345-49d7-b791-29a0dbe58ed2 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2329.398921] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059184} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.401277] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2329.402398] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6be1df-a001-4baa-83fc-7a8a9a118d18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.425244] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 837d85b2-1c61-4286-bd36-49ad273107df/837d85b2-1c61-4286-bd36-49ad273107df.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2329.427758] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-494bd43a-af5e-4f76-a496-bfe99c3e36fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.448221] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2329.448221] env[62875]: 
value = "task-2180628" [ 2329.448221] env[62875]: _type = "Task" [ 2329.448221] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.457202] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180628, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.458754] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "refresh_cache-ff4b45e4-da16-4943-b7c0-a8d92781e4c0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.503259] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79782746-71be-41cf-9d55-820361bc799d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.510245] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.511316] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ee0236-b013-407f-9c4c-65a8f5fac0a5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.543049] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7039f67a-b584-42ce-8661-3b787d33f917 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.550139] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a917dc12-9163-448c-98de-4c41cf74637e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.563119] env[62875]: DEBUG nova.compute.provider_tree [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2329.717574] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.927357] env[62875]: DEBUG nova.network.neutron [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2329.958802] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.002234] env[62875]: DEBUG nova.network.neutron [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2330.083710] env[62875]: ERROR nova.scheduler.client.report [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [req-f47fedca-73c1-42db-bc5a-4b4a28907d95] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f47fedca-73c1-42db-bc5a-4b4a28907d95"}]} [ 2330.101928] env[62875]: DEBUG nova.scheduler.client.report [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2330.115847] env[62875]: DEBUG nova.scheduler.client.report [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2330.116120] env[62875]: DEBUG nova.compute.provider_tree [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2330.127214] env[62875]: DEBUG nova.scheduler.client.report [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2330.147242] env[62875]: DEBUG nova.scheduler.client.report [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2330.266301] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee455518-0346-4ba3-987e-c0e771bec38f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.274520] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5c2fd461-140f-45f3-ab02-cd7a261d5b4a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.304962] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b172e3e-9b84-4d64-b3c9-7867b048a35a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.312695] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09307e23-a17d-4036-826d-e62f894f2793 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.327365] env[62875]: DEBUG nova.compute.provider_tree [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2330.460160] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180628, 'name': ReconfigVM_Task, 'duration_secs': 0.805971} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.460432] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 837d85b2-1c61-4286-bd36-49ad273107df/837d85b2-1c61-4286-bd36-49ad273107df.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2330.461083] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9024613f-e52f-4b03-ad81-6145e447478d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.467681] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2330.467681] env[62875]: value = "task-2180629" [ 2330.467681] env[62875]: _type = "Task" [ 2330.467681] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.475766] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180629, 'name': Rename_Task} progress is 0%. 
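The ERROR at 2330.083710 is placement's optimistic concurrency in action: the inventory PUT carried a stale resource-provider generation (its payload still had DISK_GB max_unit 175, while the refreshed inventory reports 174, so datastore capacity apparently changed underneath the writer). The client's response, visible in the records that follow it, is to re-read inventories, aggregates and traits and compare again; here the refreshed view already matches ("Inventory has not changed in ProviderTree"), so no second PUT is needed. The retry shape, with hypothetical HTTP helpers standing in for the placement client:

    def set_inventory(read_provider, put_inventory, provider_uuid, inv,
                      max_retries=3):
        """Optimistic-concurrency inventory update (sketch).

        read_provider returns (generation, current_inventory);
        put_inventory returns an HTTP status, with 409 meaning the
        generation was stale -- the "resource provider generation
        conflict" in the log.
        """
        for _ in range(max_retries):
            gen, current = read_provider(provider_uuid)
            if current == inv:
                return  # refresh showed nothing left to change
            if put_inventory(provider_uuid, gen, inv) != 409:
                return
            # stale generation: loop to re-read and try again
        raise RuntimeError("placement inventory update kept conflicting")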
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.505656] env[62875]: DEBUG oslo_concurrency.lockutils [req-16122447-4c84-49a6-8c7e-1aab97add876 req-1c9124ae-9ee8-4f6a-a5ad-48fdedabfcfb service nova] Releasing lock "refresh_cache-ff4b45e4-da16-4943-b7c0-a8d92781e4c0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.506186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "refresh_cache-ff4b45e4-da16-4943-b7c0-a8d92781e4c0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.506422] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2330.511384] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.511591] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.511751] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.511905] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.512053] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... 
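The periodic-task block above ends with a guard: _reclaim_queued_deletes fires on every periodic tick but is a no-op unless deferred delete is configured. A sketch of that skip pattern, with reclaim as a hypothetical callable:

    def reclaim_queued_deletes(conf_interval, reclaim):
        """Run only when CONF.reclaim_instance_interval is positive."""
        if conf_interval <= 0:
            print("CONF.reclaim_instance_interval <= 0, skipping...")
            return
        reclaim()  # would purge soft-deleted instances older than the interval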
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2330.830926] env[62875]: DEBUG nova.scheduler.client.report [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2330.978982] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180629, 'name': Rename_Task, 'duration_secs': 0.16693} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.979473] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2330.979731] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb642c2b-0972-4cad-a551-ed1de444fdc0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.987018] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2330.987018] env[62875]: value = "task-2180631" [ 2330.987018] env[62875]: _type = "Task" [ 2330.987018] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.994615] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180631, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.040188] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2331.177496] env[62875]: DEBUG nova.network.neutron [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Updating instance_info_cache with network_info: [{"id": "14853567-7b7f-4ef6-a732-a0a4e02ccf8d", "address": "fa:16:3e:1c:52:d9", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14853567-7b", "ovs_interfaceid": "14853567-7b7f-4ef6-a732-a0a4e02ccf8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.336336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.491s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.339165] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.482s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2331.339436] env[62875]: DEBUG nova.objects.instance [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lazy-loading 'resources' on Instance uuid 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2331.354990] env[62875]: INFO nova.scheduler.client.report [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Deleted allocations for instance d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf [ 2331.496891] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180631, 'name': PowerOnVM_Task} progress is 94%. 
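The network_info entry above (an NSX-backed OVS port on br-int) is what the driver turns into the compact VIF info logged just below at 2331.681418: an OpaqueNetwork reference taken from the nsx-logical-switch-id, plus MAC, iface id and a vmxnet3 model. A sketch of that mapping, with field names taken directly from the two structures printed in this log; the hard-coded vmxnet3 is an assumption (image/flavor metadata can select the model):

    def vif_info_from_network_info(vif):
        """Map one cached network_info entry to vmwareapi VIF info."""
        details = vif["details"]
        return {
            "network_name": vif["network"]["bridge"],  # e.g. br-int
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",  # assumed default, see lead-in
        }

Feeding it the port dict above reproduces the VIF info list that CreateVM_Task is then built from.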
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.680902] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "refresh_cache-ff4b45e4-da16-4943-b7c0-a8d92781e4c0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2331.680902] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Instance network_info: |[{"id": "14853567-7b7f-4ef6-a732-a0a4e02ccf8d", "address": "fa:16:3e:1c:52:d9", "network": {"id": "59b4d410-9ba8-443f-aa0d-60b8e236a1f6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-947037515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4f66deaad8e44f8b352e02a74ca6613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14853567-7b", "ovs_interfaceid": "14853567-7b7f-4ef6-a732-a0a4e02ccf8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2331.681418] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:52:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14853567-7b7f-4ef6-a732-a0a4e02ccf8d', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2331.688936] env[62875]: DEBUG oslo.service.loopingcall [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2331.689017] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2331.690022] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90d9a0aa-5042-4e21-abd6-136df64c7832 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.709496] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2331.709496] env[62875]: value = "task-2180632" [ 2331.709496] env[62875]: _type = "Task" [ 2331.709496] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.717318] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180632, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.864905] env[62875]: DEBUG oslo_concurrency.lockutils [None req-460c7f1a-ee95-47cb-a1db-c173ee22b712 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.177s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.946561] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfd88df-9799-466a-a4f0-5c5390e38f16 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.954147] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb8e361-7048-4e0b-a317-0f2f655046ae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.983933] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b716ec-c500-4bdb-8cff-d386e7c790d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.993542] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a788ae3-c689-43f0-a636-36efcbcf863c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.999863] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180631, 'name': PowerOnVM_Task, 'duration_secs': 0.6575} completed successfully. 
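Annotation: the recurring "Waiting for the task ... to complete" / "progress is N%" pairs are oslo.vmware's task-polling loop. A minimal sketch of that pattern, assuming oslo.vmware's public VMwareAPISession / invoke_api / wait_for_task API; the host, credentials, and vm_ref are placeholders, not values from this deployment:

    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        # invoke_api() issues the SOAP call and returns a Task moref;
        # wait_for_task() polls it (emitting "progress is N%" DEBUG lines
        # like the ones above) until it succeeds or raises on error.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

    # Placeholder connection details; a real session logs in at construction.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)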
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.000436] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2332.000647] env[62875]: INFO nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Took 15.17 seconds to spawn the instance on the hypervisor. [ 2332.000828] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2332.001528] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e15f17b-43df-4ada-b8eb-48bcb996bfe6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.011401] env[62875]: DEBUG nova.compute.provider_tree [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2332.104780] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "9b3628be-b8a3-4105-bc84-088dede23aaf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.105066] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.105290] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "9b3628be-b8a3-4105-bc84-088dede23aaf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.105498] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.105696] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2332.108223] env[62875]: INFO nova.compute.manager [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Terminating instance [ 2332.218872] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180632, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.515087] env[62875]: DEBUG nova.scheduler.client.report [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2332.526317] env[62875]: INFO nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Took 20.03 seconds to build instance. [ 2332.612138] env[62875]: DEBUG nova.compute.manager [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Start destroying the instance on the hypervisor. 
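Annotation: the Acquiring / acquired / released lock lines above, with their waited/held timings, come from oslo.concurrency's lockutils, which Nova uses to serialize per-instance teardown and resource-tracker updates. A standalone sketch of the same pattern (the lock name mirrors the log; the function body is illustrative):

    from oslo_concurrency import lockutils

    # Serialize resource-tracker updates the way the "compute_resources"
    # lock does above; fair=True grants the lock to waiters in FIFO order,
    # and lockutils logs the waited/held durations at DEBUG on each pass.
    @lockutils.synchronized('compute_resources', fair=True)
    def update_usage():
        pass  # mutate shared tracker state here

    update_usage()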
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2332.612295] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2332.613224] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0451f656-4e2e-4ae2-9b07-a3b01d21e7d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.622360] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2332.622588] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03892a27-3f6a-49ce-8e1c-b96537fb8a78 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.627969] env[62875]: DEBUG oslo_vmware.api [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2332.627969] env[62875]: value = "task-2180633" [ 2332.627969] env[62875]: _type = "Task" [ 2332.627969] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.637365] env[62875]: DEBUG oslo_vmware.api [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180633, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.706646] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2332.719675] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180632, 'name': CreateVM_Task, 'duration_secs': 0.639127} completed successfully. 
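Annotation: "Running periodic task ComputeManager._poll_rebooting_instances" and its siblings are dispatched by oslo.service's periodic-task machinery, which ComputeManager inherits. A minimal sketch, assuming oslo.service's decorator API (the spacing and task body are illustrative):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        # Dispatched by run_periodic_tasks(); each dispatch produces a
        # "Running periodic task ..." DEBUG line like the ones above.
        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _poll_rebooting_instances(self, context):
            pass

    Manager().run_periodic_tasks(context=None)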
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.719842] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2332.720607] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2332.720779] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2332.721236] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2332.721379] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dfa457f-36eb-4bce-b61b-d18dcae0bac2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.726168] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2332.726168] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d8abdd-4a7a-8d75-d258-26d101f37126" [ 2332.726168] env[62875]: _type = "Task" [ 2332.726168] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.734155] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d8abdd-4a7a-8d75-d258-26d101f37126, 'name': SearchDatastore_Task} progress is 0%. 
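Annotation: the lock on "[datastore1] devstack-image-cache_base/<image>" followed by a SearchDatastore_Task above is the driver's image-cache fast path — probe for the cached VMDK under a per-image lock and only fetch it when missing. A schematic sketch of that check-then-act shape; vmdk_exists and fetch_image are hypothetical stand-ins (stubbed below), not Nova functions:

    from oslo_concurrency import lockutils

    CACHE = '[datastore1] devstack-image-cache_base'

    def vmdk_exists(session, path):
        return False  # hypothetical stub; the driver issues SearchDatastore_Task

    def fetch_image(session, image_id, path):
        print('fetching', path)  # hypothetical stub; the driver downloads + copies

    def ensure_cached(session, image_id):
        path = f'{CACHE}/{image_id}/{image_id}.vmdk'
        # Lock on the cache path so concurrent spawns of the same image
        # cannot race each other into a duplicate download.
        with lockutils.lock(path):
            if not vmdk_exists(session, path):
                fetch_image(session, image_id, path)
        return path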
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.020395] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.022712] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.512s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.022943] env[62875]: DEBUG nova.objects.instance [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lazy-loading 'resources' on Instance uuid 9e0aaea6-96cf-494d-9f70-a709a47f9772 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2333.027838] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "837d85b2-1c61-4286-bd36-49ad273107df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.538s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.040605] env[62875]: INFO nova.scheduler.client.report [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Deleted allocations for instance 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d [ 2333.137294] env[62875]: DEBUG oslo_vmware.api [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180633, 'name': PowerOffVM_Task, 'duration_secs': 0.228671} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.137537] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2333.137703] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2333.137949] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e182cced-75e2-44dd-8be1-e863657cffe9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.228754] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2333.228979] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2333.229189] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Deleting the datastore file [datastore2] 9b3628be-b8a3-4105-bc84-088dede23aaf {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2333.232358] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d544665-1cf3-4682-ab5f-5addb74e44eb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.239476] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d8abdd-4a7a-8d75-d258-26d101f37126, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.240761] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2333.241010] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2333.241290] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2333.241507] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2333.241721] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2333.242068] env[62875]: DEBUG oslo_vmware.api [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for the task: (returnval){ [ 2333.242068] env[62875]: value = "task-2180636" [ 2333.242068] env[62875]: _type = "Task" [ 2333.242068] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.242291] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c99574b-a8b7-42b6-97c9-7965b0c2ad62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.251479] env[62875]: DEBUG oslo_vmware.api [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.252781] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2333.253026] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2333.253780] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baae8932-62b6-436d-af4e-07b88ffceaa4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.258380] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2333.258380] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52761506-e060-49ad-5ec0-4170824d2f28" [ 2333.258380] env[62875]: _type = "Task" [ 2333.258380] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.265879] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52761506-e060-49ad-5ec0-4170824d2f28, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.549912] env[62875]: DEBUG oslo_concurrency.lockutils [None req-407491c7-e10f-44c8-9f08-752f49ba8325 tempest-ServersNegativeTestMultiTenantJSON-731435960 tempest-ServersNegativeTestMultiTenantJSON-731435960-project-member] Lock "9e6c5891-a5fe-49f0-94af-6ac6d3704d7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.912s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.620007] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa3f6a2-309c-4e3c-b3f8-ac3e687b0e2b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.627713] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c067f2db-bafd-4625-b78b-ffb20fca125d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.659266] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905c1aa7-4c6f-4c9f-9225-410f5bfa7e8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.666817] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35888c8-11c1-44cb-aa11-7eefce230ec2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.680014] env[62875]: DEBUG nova.compute.provider_tree [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2333.755943] env[62875]: DEBUG oslo_vmware.api [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Task: {'id': task-2180636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136674} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.756268] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2333.756467] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2333.756695] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2333.757403] env[62875]: INFO nova.compute.manager [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2333.757403] env[62875]: DEBUG oslo.service.loopingcall [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2333.757570] env[62875]: DEBUG nova.compute.manager [-] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2333.757570] env[62875]: DEBUG nova.network.neutron [-] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2333.770818] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52761506-e060-49ad-5ec0-4170824d2f28, 'name': SearchDatastore_Task, 'duration_secs': 0.008095} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.771679] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43df360f-63dd-45fd-bcc5-2676594a76cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.777227] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2333.777227] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528c92fb-5dac-72eb-7ae8-e0e6463fba65" [ 2333.777227] env[62875]: _type = "Task" [ 2333.777227] env[62875]: } to complete. 
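Annotation: "Waiting for function ..._deallocate_network_with_retries to return" above is a loopingcall-driven retry wrapper around Neutron deallocation. A minimal sketch of that retry shape, assuming oslo.service's RetryDecorator; the retry counts, sleeps, and exception type are illustrative:

    from oslo_service import loopingcall

    class TransientNeutronError(Exception):
        pass

    # Retry deallocation with growing sleeps; only the listed exception
    # types trigger another attempt, anything else propagates immediately.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(TransientNeutronError,))
    def deallocate_network():
        pass  # unbind and delete the instance's ports here

    deallocate_network()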
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.785803] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528c92fb-5dac-72eb-7ae8-e0e6463fba65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.088619] env[62875]: DEBUG nova.compute.manager [req-3895a48f-827c-4eb4-be31-13b566c03d3b req-b2a2f636-bfe9-4810-be36-08ae2bd1fd2d service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Received event network-vif-deleted-37efe56a-09a3-4de0-ad7e-7c89ebad0afc {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2334.088619] env[62875]: INFO nova.compute.manager [req-3895a48f-827c-4eb4-be31-13b566c03d3b req-b2a2f636-bfe9-4810-be36-08ae2bd1fd2d service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Neutron deleted interface 37efe56a-09a3-4de0-ad7e-7c89ebad0afc; detaching it from the instance and deleting it from the info cache [ 2334.088619] env[62875]: DEBUG nova.network.neutron [req-3895a48f-827c-4eb4-be31-13b566c03d3b req-b2a2f636-bfe9-4810-be36-08ae2bd1fd2d service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.217729] env[62875]: DEBUG nova.scheduler.client.report [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 114 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2334.218023] env[62875]: DEBUG nova.compute.provider_tree [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 114 to 115 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2334.218217] env[62875]: DEBUG nova.compute.provider_tree [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2334.287621] 
env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528c92fb-5dac-72eb-7ae8-e0e6463fba65, 'name': SearchDatastore_Task, 'duration_secs': 0.009623} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.287921] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2334.288217] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] ff4b45e4-da16-4943-b7c0-a8d92781e4c0/ff4b45e4-da16-4943-b7c0-a8d92781e4c0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2334.288489] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92df690d-59e7-4140-95ce-f091a147afb7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.295286] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2334.295286] env[62875]: value = "task-2180637" [ 2334.295286] env[62875]: _type = "Task" [ 2334.295286] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.303858] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180637, 'name': CopyVirtualDisk_Task} progress is 0%. 
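Annotation: the "generation from 114 to 115" update a few lines up is Placement's optimistic concurrency at work — every inventory write carries the provider generation the client last read, the server bumps it on success, and a stale generation earns a 409 that forces a re-read. A schematic sketch of that compare-and-swap loop against the Placement HTTP API; the endpoint, token handling, and microversion header are placeholders to verify against your deployment:

    import requests

    PLACEMENT = 'http://placement.example.test'               # placeholder
    HEADERS = {'X-Auth-Token': '<token>',                     # placeholder
               'OpenStack-API-Version': 'placement 1.26'}

    def set_inventory(provider_uuid, inventories):
        while True:
            # Read the provider to learn its current generation.
            rp = requests.get(
                f'{PLACEMENT}/resource_providers/{provider_uuid}',
                headers=HEADERS).json()
            resp = requests.put(
                f'{PLACEMENT}/resource_providers/{provider_uuid}/inventories',
                json={'resource_provider_generation': rp['generation'],
                      'inventories': inventories},
                headers=HEADERS)
            if resp.status_code != 409:  # 409 == generation conflict; retry
                return resp.json()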
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.571431] env[62875]: DEBUG nova.network.neutron [-] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.591664] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9559ca3f-a512-452e-b933-3aa8fa0c87d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.608878] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c555ac-e4d3-4f8f-88db-f64b12ba0e48 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.635827] env[62875]: DEBUG nova.compute.manager [req-3895a48f-827c-4eb4-be31-13b566c03d3b req-b2a2f636-bfe9-4810-be36-08ae2bd1fd2d service nova] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Detach interface failed, port_id=37efe56a-09a3-4de0-ad7e-7c89ebad0afc, reason: Instance 9b3628be-b8a3-4105-bc84-088dede23aaf could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2334.723621] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.726631] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.009s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2334.726631] env[62875]: DEBUG nova.objects.instance [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lazy-loading 'resources' on Instance uuid 45403db3-ff20-42d3-8a37-8db671d8c1fa {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2334.752218] env[62875]: INFO nova.scheduler.client.report [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Deleted allocations for instance 9e0aaea6-96cf-494d-9f70-a709a47f9772 [ 2334.806202] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180637, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475509} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.806393] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] ff4b45e4-da16-4943-b7c0-a8d92781e4c0/ff4b45e4-da16-4943-b7c0-a8d92781e4c0.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2334.806628] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2334.806858] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1f2a8c0-6ce2-4cde-aa95-f171013c8001 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.813735] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2334.813735] env[62875]: value = "task-2180638" [ 2334.813735] env[62875]: _type = "Task" [ 2334.813735] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.823570] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180638, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.074498] env[62875]: INFO nova.compute.manager [-] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Took 1.32 seconds to deallocate network for instance. [ 2335.262559] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ed69f758-21fb-4729-9cd9-f0d9b0d1a5e6 tempest-ServerActionsTestOtherB-1222443432 tempest-ServerActionsTestOtherB-1222443432-project-member] Lock "9e0aaea6-96cf-494d-9f70-a709a47f9772" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.193s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.323680] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078065} completed successfully. 
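Annotation: "Extending root virtual disk to 1048576" above is a size in KiB — the vSphere ExtendVirtualDisk call takes newCapacityKb — so this reads as a 1 GiB flavor root disk (1 GiB = 1048576 KiB). A one-line check with oslo.utils' units constants; the root_gb value is inferred from the log, not read from the flavor:

    from oslo_utils import units

    root_gb = 1                  # inferred from the log line above
    print(root_gb * units.Mi)    # 1048576 KiB == 1 GiB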
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.323935] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2335.324823] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8c8091-1a26-4233-bb3d-87daf2986ac7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.353878] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] ff4b45e4-da16-4943-b7c0-a8d92781e4c0/ff4b45e4-da16-4943-b7c0-a8d92781e4c0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2335.355652] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ee1e38b-535a-4610-8e14-db6687a8ea63 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.371911] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8aadb2-da99-41aa-92f8-23137146a11f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.380760] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af106fd-1e2d-404a-9006-2db3ef885766 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.383898] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2335.383898] env[62875]: value = "task-2180640" [ 2335.383898] env[62875]: _type = "Task" [ 2335.383898] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.418293] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1953859-e6ec-4940-b597-b159504f244a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.421038] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180640, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.426503] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d434f2ea-da4d-4e26-8fa8-7c8f6c6c1b15 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.440124] env[62875]: DEBUG nova.compute.provider_tree [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2335.585092] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2335.706396] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2335.894663] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180640, 'name': ReconfigVM_Task, 'duration_secs': 0.315357} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.894992] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Reconfigured VM instance instance-0000005e to attach disk [datastore1] ff4b45e4-da16-4943-b7c0-a8d92781e4c0/ff4b45e4-da16-4943-b7c0-a8d92781e4c0.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2335.895706] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93078357-3425-495f-9b46-5b46d1ad234d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.902381] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2335.902381] env[62875]: value = "task-2180641" [ 2335.902381] env[62875]: _type = "Task" [ 2335.902381] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.910764] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180641, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.943304] env[62875]: DEBUG nova.scheduler.client.report [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2336.414734] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180641, 'name': Rename_Task, 'duration_secs': 0.146183} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2336.415024] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2336.415262] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65597007-59c9-428e-91d4-142677e4e8da {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.422422] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2336.422422] env[62875]: value = "task-2180642" [ 2336.422422] env[62875]: _type = "Task" [ 2336.422422] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2336.430309] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180642, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2336.450358] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.724s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2336.453540] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.868s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2336.453853] env[62875]: DEBUG nova.objects.instance [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lazy-loading 'resources' on Instance uuid 9b3628be-b8a3-4105-bc84-088dede23aaf {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2336.488141] env[62875]: INFO nova.scheduler.client.report [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleted allocations for instance 45403db3-ff20-42d3-8a37-8db671d8c1fa [ 2336.932171] env[62875]: DEBUG oslo_vmware.api [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180642, 'name': PowerOnVM_Task, 'duration_secs': 0.422078} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2336.932431] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2336.932630] env[62875]: INFO nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Took 17.84 seconds to spawn the instance on the hypervisor. 
[ 2336.932988] env[62875]: DEBUG nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2336.933590] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d97d9e-914b-424d-9e07-4b2b042888c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.000348] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e7fb911a-65f7-4b8d-a265-b288f5d5c324 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "45403db3-ff20-42d3-8a37-8db671d8c1fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.023s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2337.053254] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a4d2c4-4340-4052-8989-c3a44cd29040 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.063345] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e41e61f-3328-43a0-a75c-cc0287307013 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.095205] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f65b5dc-d538-409d-97e7-4638bf955507 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.103038] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a961bd40-d309-43e4-b372-34b9067538d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.116255] env[62875]: DEBUG nova.compute.provider_tree [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2337.450709] env[62875]: INFO nova.compute.manager [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Took 24.92 seconds to build instance. 
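The 'Lock "compute_resources" acquired/released :: waited/held Ns' records above come from oslo.concurrency's synchronized decorator; the "inner .../lockutils.py" location tags in this log point at its wrapper. A minimal sketch of that usage follows, with an illustrative function name (claim_resources) rather than Nova's resource-tracker internals.

    # Sketch only: the oslo.concurrency pattern behind the lock records above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # While the body runs, the named in-process semaphore is held;
        # lockutils logs 'acquired ... :: waited Ns' on entry and
        # '"released" ... :: held Ns' on exit at DEBUG, matching this log.
        return instance_uuid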
[ 2337.619823] env[62875]: DEBUG nova.scheduler.client.report [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2337.706949] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2337.952817] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dda51876-d78d-4276-a43a-0467918448a9 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.433s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2338.127570] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.672s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2338.153743] env[62875]: INFO nova.scheduler.client.report [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Deleted allocations for instance 9b3628be-b8a3-4105-bc84-088dede23aaf [ 2338.295676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "837d85b2-1c61-4286-bd36-49ad273107df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2338.295676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "837d85b2-1c61-4286-bd36-49ad273107df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2338.295676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "837d85b2-1c61-4286-bd36-49ad273107df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2338.295676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "837d85b2-1c61-4286-bd36-49ad273107df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2338.295676] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "837d85b2-1c61-4286-bd36-49ad273107df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2338.297533] env[62875]: INFO nova.compute.manager [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Terminating instance [ 2338.393539] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2338.394056] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2338.394336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2338.394615] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2338.394882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2338.397010] env[62875]: INFO nova.compute.manager [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Terminating instance [ 2338.664146] env[62875]: DEBUG oslo_concurrency.lockutils [None req-05f7f513-f84e-4bd5-b147-a76675b0af37 tempest-ListImageFiltersTestJSON-1297249689 tempest-ListImageFiltersTestJSON-1297249689-project-member] Lock "9b3628be-b8a3-4105-bc84-088dede23aaf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.558s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2338.801841] env[62875]: DEBUG nova.compute.manager [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2338.802099] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2338.803064] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439723bf-0faa-48d3-bd80-55573875c702 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.812484] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2338.812737] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6feeff34-dfa4-4ad1-902c-b235e1389b50 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.822779] env[62875]: DEBUG oslo_vmware.api [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2338.822779] env[62875]: value = "task-2180643" [ 2338.822779] env[62875]: _type = "Task" [ 2338.822779] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.834429] env[62875]: DEBUG oslo_vmware.api [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180643, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.904022] env[62875]: DEBUG nova.compute.manager [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2338.904022] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2338.904022] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a20c37-24b0-47ad-908d-3c08904b511c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.914078] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2338.914585] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2338.918954] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2338.919536] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46a47b7c-7100-4f39-9244-7a34ab93dcf8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.926374] env[62875]: DEBUG oslo_vmware.api [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2338.926374] env[62875]: value = "task-2180644" [ 2338.926374] env[62875]: _type = "Task" [ 2338.926374] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.938469] env[62875]: DEBUG oslo_vmware.api [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180644, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2339.136998] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Acquiring lock "760f10ab-4617-418b-b922-4f9835eb96f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.138858] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "760f10ab-4617-418b-b922-4f9835eb96f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2339.337060] env[62875]: DEBUG oslo_vmware.api [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180643, 'name': PowerOffVM_Task, 'duration_secs': 0.227352} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2339.337060] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2339.337060] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2339.337060] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e30fb0b-bcbc-4684-bad8-d5425dfa9fc4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.423309] env[62875]: DEBUG nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2339.440139] env[62875]: DEBUG oslo_vmware.api [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180644, 'name': PowerOffVM_Task, 'duration_secs': 0.172513} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2339.440803] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2339.441289] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2339.441868] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-759c3f6a-2e2b-4f05-9651-529bec04d049 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.640383] env[62875]: DEBUG nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2339.732370] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2339.732702] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2339.732813] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleting the datastore file [datastore1] 837d85b2-1c61-4286-bd36-49ad273107df {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2339.733041] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-602f7c43-cdf9-40a7-aafc-330fca799b64 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.737365] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2339.737476] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2339.737598] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleting the datastore file [datastore1] ff4b45e4-da16-4943-b7c0-a8d92781e4c0 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2339.738229] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e37684b2-f595-4486-8546-52672e17c2d0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.744043] env[62875]: DEBUG oslo_vmware.api [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2339.744043] env[62875]: value = "task-2180647" [ 2339.744043] env[62875]: _type = "Task" [ 2339.744043] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.745445] env[62875]: DEBUG oslo_vmware.api [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for the task: (returnval){ [ 2339.745445] env[62875]: value = "task-2180648" [ 2339.745445] env[62875]: _type = "Task" [ 2339.745445] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.760038] env[62875]: DEBUG oslo_vmware.api [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2339.763501] env[62875]: DEBUG oslo_vmware.api [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180648, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2339.948862] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.948862] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2339.949491] env[62875]: INFO nova.compute.claims [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2340.162928] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2340.259279] env[62875]: DEBUG oslo_vmware.api [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16789} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.262907] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2340.263183] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2340.263478] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2340.263717] env[62875]: INFO nova.compute.manager [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Took 1.36 seconds to destroy the instance on the hypervisor. 
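The oslo.service record below ("Waiting for function ... _deallocate_network_with_retries to return", logged from loopingcall.py) is the looping-call wrapper around Nova's network-deallocation retry. As a rough illustration, here is the FixedIntervalLoopingCall primitive from that module; whether this exact class backs the retry here is an assumption, and attempt_deallocate() is a hypothetical stand-in for the retried operation.

    # Sketch only: a fixed-interval retry loop with oslo.service.
    from oslo_service import loopingcall

    def attempt_deallocate():
        # Hypothetical stand-in for the real work being retried.
        return True

    def _retrying_step():
        # Raise LoopingCallDone to stop the loop and hand a return value
        # back to whoever is blocked on wait().
        if attempt_deallocate():
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_retrying_step)
    result = timer.start(interval=1.0).wait()  # blocks until LoopingCallDone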
[ 2340.263820] env[62875]: DEBUG oslo.service.loopingcall [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2340.264043] env[62875]: DEBUG oslo_vmware.api [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Task: {'id': task-2180647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161702} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.264297] env[62875]: DEBUG nova.compute.manager [-] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2340.264645] env[62875]: DEBUG nova.network.neutron [-] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2340.266530] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2340.266716] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2340.266891] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2340.267076] env[62875]: INFO nova.compute.manager [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Took 1.47 seconds to destroy the instance on the hypervisor. [ 2340.267304] env[62875]: DEBUG oslo.service.loopingcall [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2340.267492] env[62875]: DEBUG nova.compute.manager [-] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2340.267584] env[62875]: DEBUG nova.network.neutron [-] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2340.706376] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.706637] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2341.028055] env[62875]: DEBUG nova.compute.manager [req-ef7d366a-68d9-45be-9461-7bbae36a4a2c req-13cbbbca-738e-4bdf-b0ec-992e50705bb9 service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Received event network-vif-deleted-f6a2852f-1e58-4373-a694-0bdb0031f649 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2341.028320] env[62875]: INFO nova.compute.manager [req-ef7d366a-68d9-45be-9461-7bbae36a4a2c req-13cbbbca-738e-4bdf-b0ec-992e50705bb9 service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Neutron deleted interface f6a2852f-1e58-4373-a694-0bdb0031f649; detaching it from the instance and deleting it from the info cache [ 2341.028576] env[62875]: DEBUG nova.network.neutron [req-ef7d366a-68d9-45be-9461-7bbae36a4a2c req-13cbbbca-738e-4bdf-b0ec-992e50705bb9 service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2341.073544] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388802be-2626-4764-b918-2a10f01a9d97 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.081407] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc914c3-59a5-480a-9066-bff39ebef85e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.112742] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac9c255-2fae-496e-a807-8d90a390e569 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.120378] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e17b38b-5915-4ad2-81fb-f51618685a38 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.133913] env[62875]: DEBUG nova.compute.provider_tree [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2341.237490] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2341.237655] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2341.237810] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2341.505574] env[62875]: DEBUG nova.network.neutron [-] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2341.534663] env[62875]: DEBUG nova.network.neutron [-] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2341.536219] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a628c9d4-f194-414b-88bd-b473ec5ee0fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.545864] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230eb7e4-1403-425f-b997-0918ad5daab3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.570162] env[62875]: DEBUG nova.compute.manager [req-ef7d366a-68d9-45be-9461-7bbae36a4a2c req-13cbbbca-738e-4bdf-b0ec-992e50705bb9 service nova] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Detach interface failed, port_id=f6a2852f-1e58-4373-a694-0bdb0031f649, reason: Instance 837d85b2-1c61-4286-bd36-49ad273107df could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2341.637535] env[62875]: DEBUG nova.scheduler.client.report [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2341.749883] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2341.750202] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.008777] env[62875]: INFO nova.compute.manager [-] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Took 1.74 seconds to deallocate network for instance. [ 2342.036683] env[62875]: INFO nova.compute.manager [-] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Took 1.77 seconds to deallocate network for instance. [ 2342.142207] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2342.142710] env[62875]: DEBUG nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2342.145354] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.983s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.146734] env[62875]: INFO nova.compute.claims [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2342.252609] env[62875]: DEBUG nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2342.469765] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updating instance_info_cache with network_info: [{"id": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "address": "fa:16:3e:1e:f3:2e", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba73130-21", "ovs_interfaceid": "8ba73130-211b-42d3-b2b1-b1b72ee95433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2342.514878] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.544687] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.651487] env[62875]: DEBUG nova.compute.utils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2342.654890] env[62875]: DEBUG nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2342.655086] env[62875]: DEBUG nova.network.neutron [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2342.694042] env[62875]: DEBUG nova.policy [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '791a39d3328040d5aa1140485a997d43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b93283112aad44f4833c1cc017a566db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2342.781977] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.971939] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-67ac6916-04f3-4eb8-b7da-37a5b28b50d9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2342.972148] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2342.972394] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2342.987685] env[62875]: DEBUG nova.network.neutron [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Successfully created port: ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 2343.053950] env[62875]: DEBUG nova.compute.manager [req-fc3f1e28-f6ac-4371-bca1-22a28beb8b06 req-356ee121-66ce-43ab-a2d5-fd3cd3c771d8 service nova] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Received event network-vif-deleted-14853567-7b7f-4ef6-a732-a0a4e02ccf8d {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2343.155553] env[62875]: DEBUG nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2343.246827] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68be03ae-4a6c-4ce7-9093-cefcec4521f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.254891] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c25a18-693d-4137-bd28-9087100852c5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.285800] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47153ca-4c77-469c-8ad6-8f43caa41859 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.293350] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11427ea7-38b3-423c-aee1-4e7eac079867 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.306465] env[62875]: DEBUG nova.compute.provider_tree [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2343.474985] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.809628] env[62875]: DEBUG nova.scheduler.client.report [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2344.167933] env[62875]: DEBUG nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 
tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2344.192374] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2344.192627] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2344.192786] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2344.192968] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2344.193136] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2344.193338] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2344.193564] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2344.193727] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 
tempest-AttachVolumeNegativeTest-1439623505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2344.193895] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2344.194073] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2344.194645] env[62875]: DEBUG nova.virt.hardware [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2344.195157] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d4ab2e-3e61-4778-bb00-9364143aee37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.203144] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f95ef4-570b-42a2-9365-3fb490abe61c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.315070] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.170s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2344.315445] env[62875]: DEBUG nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2344.318190] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.803s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2344.318450] env[62875]: DEBUG nova.objects.instance [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lazy-loading 'resources' on Instance uuid 837d85b2-1c61-4286-bd36-49ad273107df {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2344.453218] env[62875]: DEBUG nova.network.neutron [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Successfully updated port: ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2344.822849] env[62875]: DEBUG nova.compute.utils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2344.827038] env[62875]: DEBUG nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Allocating IP information in the background. 
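
The "Acquiring lock ... / acquired ... waited 1.803s / released ... held 2.170s" entries here all come from oslo.concurrency's lockutils wrapper, which times a named in-process lock around a critical section. A minimal sketch of the pattern (the lock name matches the log; the decorated body is hypothetical):

import time
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs while holding the 'compute_resources' lock; contending callers
    # block, and the "waited"/"held" durations in the log measure exactly
    # that contention.
    time.sleep(0.1)

update_usage()
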
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2344.827230] env[62875]: DEBUG nova.network.neutron [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2344.869362] env[62875]: DEBUG nova.policy [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f72bf7ed6aa4ad5b027167a6a0a4d40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4d3024874d9450e82c83dc6ccc591e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2344.922710] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7613cfa-d1b7-4eb5-83dc-07c121c296ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.930634] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cab837-a4a6-437b-a4bd-b6d1f435d7a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.960420] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2344.960505] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2344.960676] env[62875]: DEBUG nova.network.neutron [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2344.962558] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22743a6b-b6e7-40ea-afbc-83d6f7baf968 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.971488] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a20ea63-e0db-42e0-a997-c94b4968fd96 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.985654] env[62875]: DEBUG nova.compute.provider_tree [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 
tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2345.108112] env[62875]: DEBUG nova.compute.manager [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Received event network-vif-plugged-ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2345.108352] env[62875]: DEBUG oslo_concurrency.lockutils [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] Acquiring lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2345.108600] env[62875]: DEBUG oslo_concurrency.lockutils [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2345.108776] env[62875]: DEBUG oslo_concurrency.lockutils [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2345.108946] env[62875]: DEBUG nova.compute.manager [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] No waiting events found dispatching network-vif-plugged-ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2345.109395] env[62875]: WARNING nova.compute.manager [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Received unexpected event network-vif-plugged-ab61f4d3-e2ed-435c-aee6-f21043aa9660 for instance with vm_state building and task_state spawning. [ 2345.109605] env[62875]: DEBUG nova.compute.manager [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Received event network-changed-ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2345.109670] env[62875]: DEBUG nova.compute.manager [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Refreshing instance network info cache due to event network-changed-ab61f4d3-e2ed-435c-aee6-f21043aa9660. 
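
The event sequence above is Neutron calling back into Nova: an external event delivers network-vif-plugged-<port_id>, the compute manager pops any registered waiter for that (instance, event) pair, and logs the "Received unexpected event" WARNING when nothing was waiting yet — which is the case here because the VM is still in vm_state building. A toy model of that dispatch, with hypothetical names:

import threading

waiters = {}  # (instance_uuid, event_name) -> threading.Event

def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    waiters[(instance_uuid, event_name)] = ev
    return ev  # the spawning thread later blocks on ev.wait(timeout=...)

def pop_instance_event(instance_uuid, event_name):
    ev = waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print("Received unexpected event %s for instance %s" % (event_name, instance_uuid))
    else:
        ev.set()  # wake the waiter

pop_instance_event("3eef1182-4159-4d57-8e6d-c5a1a50315f4",
                   "network-vif-plugged")  # no waiter registered -> warning path
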
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2345.109830] env[62875]: DEBUG oslo_concurrency.lockutils [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] Acquiring lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2345.233575] env[62875]: DEBUG nova.network.neutron [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Successfully created port: ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2345.328604] env[62875]: DEBUG nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2345.488686] env[62875]: DEBUG nova.scheduler.client.report [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2345.494971] env[62875]: DEBUG nova.network.neutron [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2345.621153] env[62875]: DEBUG nova.network.neutron [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updating instance_info_cache with network_info: [{"id": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "address": "fa:16:3e:d7:6b:a0", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab61f4d3-e2", "ovs_interfaceid": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2345.835277] env[62875]: INFO nova.virt.block_device [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Booting with volume 80b0118f-e6e2-416e-bd9d-2cf922e866bc at /dev/sda [ 2345.870087] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c3a6055-fd75-4c21-9ba4-49e0a6d8ae64 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.879615] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50cb5525-77f7-45b6-ad64-9af802c75e15 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.903092] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02454047-3fab-40b3-8a92-7880571f8c94 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.911056] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696bfca3-c62b-4dcd-a502-17136ad23b10 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.935196] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdbfdfb-b505-44e5-8d50-d4101376d4d1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.954994] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eccdbe6-a74c-4942-bc50-b8785473023c {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.970145] env[62875]: DEBUG nova.virt.block_device [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updating existing volume attachment record: 3a0caaa0-5cad-4792-bf3f-2bec2bda02ea {{(pid=62875) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2345.994873] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2345.998200] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.454s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2345.998684] env[62875]: DEBUG nova.objects.instance [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lazy-loading 'resources' on Instance uuid ff4b45e4-da16-4943-b7c0-a8d92781e4c0 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2346.015180] env[62875]: INFO nova.scheduler.client.report [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted allocations for instance 837d85b2-1c61-4286-bd36-49ad273107df [ 2346.123975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2346.124393] env[62875]: DEBUG nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Instance network_info: |[{"id": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "address": "fa:16:3e:d7:6b:a0", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapab61f4d3-e2", "ovs_interfaceid": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2346.124711] env[62875]: DEBUG oslo_concurrency.lockutils [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] Acquired lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2346.124898] env[62875]: DEBUG nova.network.neutron [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Refreshing network info cache for port ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2346.126339] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:6b:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab61f4d3-e2ed-435c-aee6-f21043aa9660', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2346.133857] env[62875]: DEBUG oslo.service.loopingcall [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2346.134763] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2346.134986] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-440fe1c4-cd08-4c74-aad4-80f08293db9a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.154387] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2346.154387] env[62875]: value = "task-2180649" [ 2346.154387] env[62875]: _type = "Task" [ 2346.154387] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2346.162241] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2346.522588] env[62875]: DEBUG oslo_concurrency.lockutils [None req-a5f3d384-8bf5-49fa-85a8-66da300024b4 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "837d85b2-1c61-4286-bd36-49ad273107df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.229s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2346.576590] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b77eb75-aa79-4ccb-b3c8-067b31c0421e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.584166] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6da777-2ca6-41cf-98cf-5ddaca822d59 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.615758] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed54200d-19ad-449e-bb5d-7cb21d15b7db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.622732] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0ff75f-5f33-48f7-9774-1fee08b072e4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.637418] env[62875]: DEBUG nova.compute.provider_tree [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2346.663175] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2346.832309] env[62875]: DEBUG nova.network.neutron [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updated VIF entry in instance network info cache for port ab61f4d3-e2ed-435c-aee6-f21043aa9660. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2346.832661] env[62875]: DEBUG nova.network.neutron [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updating instance_info_cache with network_info: [{"id": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "address": "fa:16:3e:d7:6b:a0", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab61f4d3-e2", "ovs_interfaceid": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2347.140750] env[62875]: DEBUG nova.scheduler.client.report [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2347.165602] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. 
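
The instance_info_cache payload above (logged once when the cache is first populated and again after the network-changed event refresh) is plain JSON; when reading these logs the interesting fields are easy to pull out of such a blob (structure trimmed down from the entry above):

import json

network_info = json.loads("""[
  {"id": "ab61f4d3-e2ed-435c-aee6-f21043aa9660",
   "address": "fa:16:3e:d7:6b:a0",
   "network": {"subnets": [{"cidr": "192.168.128.0/28",
                            "ips": [{"address": "192.168.128.12"}]}],
               "meta": {"mtu": 8950}}}
]""")
vif = network_info[0]
print(vif["network"]["subnets"][0]["ips"][0]["address"])  # 192.168.128.12
print(vif["network"]["meta"]["mtu"])                      # 8950
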
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2347.335511] env[62875]: DEBUG oslo_concurrency.lockutils [req-042bb7e0-0aec-4d36-bc72-80ab7d3086dd req-749fe56a-57fd-492f-af78-d164faa2df27 service nova] Releasing lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2347.645136] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2347.648047] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.866s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.648994] env[62875]: INFO nova.compute.claims [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2347.664042] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2347.667027] env[62875]: INFO nova.scheduler.client.report [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Deleted allocations for instance ff4b45e4-da16-4943-b7c0-a8d92781e4c0 [ 2348.052741] env[62875]: DEBUG nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2348.053361] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2348.053590] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2348.053749] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2348.053927] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2348.054085] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2348.054231] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2348.054468] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2348.054625] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2348.054792] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Got 1 
possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2348.054953] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2348.055136] env[62875]: DEBUG nova.virt.hardware [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2348.055990] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03dc5df-c0fd-4012-b9d0-0fd71bc1fb1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.064671] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8409ce73-e292-4187-90a3-d43b9ff9b8cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.164702] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2348.173755] env[62875]: DEBUG oslo_concurrency.lockutils [None req-28b5cf3d-0cd2-4e65-abc6-ea87a5576311 tempest-MultipleCreateTestJSON-911567716 tempest-MultipleCreateTestJSON-911567716-project-member] Lock "ff4b45e4-da16-4943-b7c0-a8d92781e4c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.780s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2348.672219] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2348.744933] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c46d591-e9dd-442b-bb91-0d0589e97829 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.752874] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3047c802-2c92-4807-a53f-eb9c1ebbcfea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.783187] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7555bf-8082-4f03-847d-d089017b940f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.790601] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2744039b-8762-490b-b109-249ea64036bb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.804993] env[62875]: DEBUG nova.compute.provider_tree [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2349.166283] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.311032] env[62875]: DEBUG nova.scheduler.client.report [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2349.666691] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.817023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2349.817508] env[62875]: DEBUG nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Start building networks asynchronously for instance. 
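
The build pipeline repeating here for each instance follows the same shape: claim resources, start network allocation in the background, build block device mappings, then spawn, joining the allocation once VIF info is needed. Nova runs this on eventlet greenthreads; the thread-pool version below is only a model of that shape, with hypothetical helper names:

from concurrent.futures import ThreadPoolExecutor

def allocate_network(instance):
    # stands in for allocate_for_instance(): create and bind Neutron ports
    return [{"port_id": "hypothetical-port", "instance": instance}]

def build_block_devices(instance):
    return [{"device_name": "/dev/sda", "instance": instance}]

def build_and_run(instance):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw = pool.submit(allocate_network, instance)   # "asynchronously"
        bdms = build_block_devices(instance)           # overlaps the allocation
        network_info = nw.result()                     # spawn waits here
        print("spawning", instance, network_info, bdms)

build_and_run("b71c432b-80ec-4b08-a62a-b1d5ccc56f86")
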
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2349.820251] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.345s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2349.820454] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2349.820609] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2349.821736] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631c540a-ae83-48a4-9bd2-e0db7df5a130 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.829754] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1ae9f8-54c9-4b50-a45f-a31e91acdeee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.848129] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5eba51-2cff-4b78-af61-d7a5d0205dbe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.854533] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdf8952-11ab-485e-9dd0-d53a89018f6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.883888] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179471MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2349.884299] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2349.884645] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2350.166897] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2350.323043] env[62875]: DEBUG nova.compute.utils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2350.324578] env[62875]: DEBUG nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2350.324730] env[62875]: DEBUG nova.network.neutron [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2350.384984] env[62875]: DEBUG nova.policy [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17f234bbbbc240d190e3dc9fd65b21ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e9bffbf1f46465286d8bc197f4b8c92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2350.667924] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2350.830827] env[62875]: DEBUG nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2350.916027] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 7969485a-ccd6-48e0-bdea-b8920af28843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.916027] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
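
The "Policy check for network:attach_external_network failed" entry above is expected for a plain tempest token: the credentials carry only the member and reader roles, and attaching external networks defaults to an admin-only rule, so the port is created without external-network access instead. A small oslo.policy sketch; the rule string here is an assumption consistent with the observed failure:

from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
conf([])  # parse an empty command line so option defaults are usable
enforcer = policy.Enforcer(conf)
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "is_admin:True"))

creds = {"roles": ["member", "reader"], "is_admin": False,
         "project_id": "4e9bffbf1f46465286d8bc197f4b8c92"}
print(enforcer.enforce("network:attach_external_network", {}, creds))  # False
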
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.916027] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 3eef1182-4159-4d57-8e6d-c5a1a50315f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.916027] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 760f10ab-4617-418b-b922-4f9835eb96f4 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.916027] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance b71c432b-80ec-4b08-a62a-b1d5ccc56f86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.916472] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2350.916755] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2350.999034] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70389de6-f4b8-4d30-82f8-c3fbf4143386 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.005723] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9475866f-f723-4abd-b252-80c95d2649b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.035529] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4eddb9-daad-4219-8873-2e39ec345e58 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.042876] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d97f8cb-49d3-4a59-b6e2-0bd2e9b3339f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.057185] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2351.084082] env[62875]: DEBUG nova.network.neutron [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 
tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Successfully created port: 98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2351.172810] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.563021] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2351.669092] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.838250] env[62875]: DEBUG nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2351.864855] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2351.865105] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2351.865261] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2351.865470] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 
tempest-AttachVolumeTestJSON-271302038-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2351.865617] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2351.865761] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2351.865963] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2351.866689] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2351.866880] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2351.867061] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2351.867254] env[62875]: DEBUG nova.virt.hardware [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2351.869091] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae2f734-30d9-4457-a328-5cb261c5b648 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.877011] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d30da7-3e11-4996-9a53-e06942727179 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.065889] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2352.066190] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.181s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2352.066350] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.067045] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 2352.170343] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.583966] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 53 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 2352.584261] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: ff4b45e4-da16-4943-b7c0-a8d92781e4c0] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2352.670830] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.088544] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 837d85b2-1c61-4286-bd36-49ad273107df] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2353.170646] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.472596] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "f1207e40-9d37-4439-a684-fa30c26d088a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2353.472836] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "f1207e40-9d37-4439-a684-fa30c26d088a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2353.593387] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9e6c5891-a5fe-49f0-94af-6ac6d3704d7d] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2353.671346] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.976973] env[62875]: DEBUG nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2354.096886] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 4356506b-ff11-43bb-84be-35ea0fe90cb1] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2354.171728] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. 
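
The _run_pending_deletes pass above walks soft-deleted instances ("There are 53 instances to clean") and gives each a bounded number of tries; "Instance has had 0 of 5 cleanup attempts" means the budget set by nova's maximum_instance_delete_attempts option (default 5) is untouched so far. A toy version of that bounded-retry sweep, with hypothetical names:

MAX_ATTEMPTS = 5  # mirrors the maximum_instance_delete_attempts default

def run_pending_deletes(instances, cleanup):
    print("There are %d instances to clean" % len(instances))
    for inst in instances:
        attempts = inst.setdefault("cleanup_attempts", 0)
        if attempts >= MAX_ATTEMPTS:
            continue  # budget exhausted; left for operator intervention
        print("[instance: %s] Instance has had %d of %d cleanup attempts"
              % (inst["uuid"], attempts, MAX_ATTEMPTS))
        try:
            cleanup(inst)
        except Exception:
            inst["cleanup_attempts"] += 1  # retried on the next periodic pass

run_pending_deletes([{"uuid": "ff4b45e4-da16-4943-b7c0-a8d92781e4c0"}],
                    cleanup=lambda inst: None)
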
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.507160] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2354.507527] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2354.508928] env[62875]: INFO nova.compute.claims [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2354.600635] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 75aaf0b8-1bcd-4dd4-9a5a-3ee9ba0debb3] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2354.673504] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.676346] env[62875]: DEBUG nova.compute.manager [req-df079e30-0e8c-4e53-9f4a-4258d9679e33 req-38bb330f-ff20-4412-acd6-7624469efaf8 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Received event network-vif-plugged-ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2354.677062] env[62875]: DEBUG oslo_concurrency.lockutils [req-df079e30-0e8c-4e53-9f4a-4258d9679e33 req-38bb330f-ff20-4412-acd6-7624469efaf8 service nova] Acquiring lock "760f10ab-4617-418b-b922-4f9835eb96f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2354.677062] env[62875]: DEBUG oslo_concurrency.lockutils [req-df079e30-0e8c-4e53-9f4a-4258d9679e33 req-38bb330f-ff20-4412-acd6-7624469efaf8 service nova] Lock "760f10ab-4617-418b-b922-4f9835eb96f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2354.677062] env[62875]: DEBUG oslo_concurrency.lockutils [req-df079e30-0e8c-4e53-9f4a-4258d9679e33 req-38bb330f-ff20-4412-acd6-7624469efaf8 service nova] Lock "760f10ab-4617-418b-b922-4f9835eb96f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2354.677203] env[62875]: DEBUG nova.compute.manager [req-df079e30-0e8c-4e53-9f4a-4258d9679e33 req-38bb330f-ff20-4412-acd6-7624469efaf8 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] No waiting 
events found dispatching network-vif-plugged-ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2354.677900] env[62875]: WARNING nova.compute.manager [req-df079e30-0e8c-4e53-9f4a-4258d9679e33 req-38bb330f-ff20-4412-acd6-7624469efaf8 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Received unexpected event network-vif-plugged-ad23a606-6c6c-449c-8874-c8f7ed1cb657 for instance with vm_state building and task_state spawning. [ 2354.809180] env[62875]: DEBUG nova.network.neutron [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Successfully updated port: ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2355.048515] env[62875]: DEBUG nova.network.neutron [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Successfully updated port: 98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2355.104715] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 84eca63b-87dc-4af4-a2dd-f489a06dcd58] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2355.174259] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180649, 'name': CreateVM_Task, 'duration_secs': 8.833813} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.174492] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2355.175214] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2355.175390] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2355.175739] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2355.176011] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40d34c6e-c617-4989-bf54-5a20e5030027 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2355.180972] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2355.180972] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52317196-3987-a8ed-aba4-3b430356d2c3" [ 2355.180972] env[62875]: _type = "Task" [ 2355.180972] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.191323] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52317196-3987-a8ed-aba4-3b430356d2c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.312917] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Acquiring lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2355.313096] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Acquired lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2355.313256] env[62875]: DEBUG nova.network.neutron [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2355.552584] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2355.552696] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2355.552853] env[62875]: DEBUG nova.network.neutron [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2355.608367] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: f3297565-541b-4a8f-a753-419b6e953ff0] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2355.610861] env[62875]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ab9dac-ffa7-49c5-9820-2e9f75e4d660 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.619751] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ada373b-e525-45bd-815a-617f5e7bfdd8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.652141] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049c39cc-4fc7-4528-a030-608c7003d892 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.660765] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2be8f66-1452-4b2e-b2a9-0ff48b27e63e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.674831] env[62875]: DEBUG nova.compute.provider_tree [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2355.691541] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52317196-3987-a8ed-aba4-3b430356d2c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009928} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.691842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2355.692092] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2355.692333] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2355.692482] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2355.692660] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2355.692909] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eac0b47c-f900-4b3a-87ef-688c149f966d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.701999] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2355.702192] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2355.702904] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-366f18b9-1dc7-4ecf-9e54-a7acea3677a4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.708570] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2355.708570] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ae5595-600a-fc20-2d6f-5080b194f30a" [ 2355.708570] env[62875]: _type = "Task" [ 2355.708570] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.716645] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ae5595-600a-fc20-2d6f-5080b194f30a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.846367] env[62875]: DEBUG nova.network.neutron [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2355.973108] env[62875]: DEBUG nova.network.neutron [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updating instance_info_cache with network_info: [{"id": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "address": "fa:16:3e:a9:13:20", "network": {"id": "a3bcd1a8-4b9a-4acb-a4bc-4b807b4eda0b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1163142763-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4d3024874d9450e82c83dc6ccc591e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad23a606-6c", "ovs_interfaceid": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2356.084087] env[62875]: DEBUG nova.network.neutron [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 
b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2356.115012] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 3458fc2b-a241-4492-9b65-f89b955b8c0b] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2356.177629] env[62875]: DEBUG nova.scheduler.client.report [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2356.218878] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ae5595-600a-fc20-2d6f-5080b194f30a, 'name': SearchDatastore_Task, 'duration_secs': 0.008627} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.219621] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5db1a61-a45f-4c6a-aa1e-0bfe0988259c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.225110] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2356.225110] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c7f1a-f900-92d1-d235-6811355eef59" [ 2356.225110] env[62875]: _type = "Task" [ 2356.225110] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.225890] env[62875]: DEBUG nova.network.neutron [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Updating instance_info_cache with network_info: [{"id": "98207918-3b84-489b-95cc-a52189a1e220", "address": "fa:16:3e:9f:79:cf", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98207918-3b", "ovs_interfaceid": "98207918-3b84-489b-95cc-a52189a1e220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2356.234396] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c7f1a-f900-92d1-d235-6811355eef59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.475542] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Releasing lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2356.475890] env[62875]: DEBUG nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Instance network_info: |[{"id": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "address": "fa:16:3e:a9:13:20", "network": {"id": "a3bcd1a8-4b9a-4acb-a4bc-4b807b4eda0b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1163142763-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4d3024874d9450e82c83dc6ccc591e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad23a606-6c", "ovs_interfaceid": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2356.476343] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:13:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad23a606-6c6c-449c-8874-c8f7ed1cb657', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2356.483750] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Creating folder: Project (e4d3024874d9450e82c83dc6ccc591e9). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2356.484019] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a42e0bea-f69e-428a-9e9d-81f22ef36ebc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.497483] env[62875]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 2356.497646] env[62875]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62875) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2356.497980] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Folder already exists: Project (e4d3024874d9450e82c83dc6ccc591e9). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 2356.498190] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Creating folder: Instances. Parent ref: group-v445002. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2356.498418] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80c2fd0e-a507-40bb-810c-96e17e0430f1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.508353] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Created folder: Instances in parent group-v445002. [ 2356.508593] env[62875]: DEBUG oslo.service.loopingcall [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2356.508783] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2356.508982] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5886aa3a-838f-4c80-b5f6-687ee8963ff9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.527705] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2356.527705] env[62875]: value = "task-2180652" [ 2356.527705] env[62875]: _type = "Task" [ 2356.527705] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.535467] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180652, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.618616] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: d22ff1b8-20e9-4a3d-a9d3-f87b06a30fdf] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2356.682525] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2356.682812] env[62875]: DEBUG nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2356.704525] env[62875]: DEBUG nova.compute.manager [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Received event network-changed-ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2356.704718] env[62875]: DEBUG nova.compute.manager [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Refreshing instance network info cache due to event network-changed-ad23a606-6c6c-449c-8874-c8f7ed1cb657. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2356.704963] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Acquiring lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2356.705123] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Acquired lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2356.705286] env[62875]: DEBUG nova.network.neutron [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Refreshing network info cache for port ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2356.731760] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Releasing lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2356.732100] env[62875]: DEBUG nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Instance network_info: |[{"id": "98207918-3b84-489b-95cc-a52189a1e220", "address": "fa:16:3e:9f:79:cf", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98207918-3b", "ovs_interfaceid": "98207918-3b84-489b-95cc-a52189a1e220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2356.732747] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:79:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98207918-3b84-489b-95cc-a52189a1e220', 
'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2356.740865] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Creating folder: Project (4e9bffbf1f46465286d8bc197f4b8c92). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2356.741908] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e183114b-ff9f-4fd0-a1f6-d6937e9cf7e4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.747699] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c7f1a-f900-92d1-d235-6811355eef59, 'name': SearchDatastore_Task, 'duration_secs': 0.009035} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.748333] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2356.748632] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3eef1182-4159-4d57-8e6d-c5a1a50315f4/3eef1182-4159-4d57-8e6d-c5a1a50315f4.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2356.749139] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9b251cd-7f81-4b5d-9e9c-9e714086b404 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.754766] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2356.754766] env[62875]: value = "task-2180654" [ 2356.754766] env[62875]: _type = "Task" [ 2356.754766] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.758872] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Created folder: Project (4e9bffbf1f46465286d8bc197f4b8c92) in parent group-v444854. [ 2356.759145] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Creating folder: Instances. Parent ref: group-v445011. 
{{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2356.759675] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32f42a44-0428-4937-9ded-22648adbbcbf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.764073] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.768102] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Created folder: Instances in parent group-v445011. [ 2356.768319] env[62875]: DEBUG oslo.service.loopingcall [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2356.768492] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2356.768678] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6105ef9b-bd90-49cf-b329-e2c5954ac502 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.788143] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2356.788143] env[62875]: value = "task-2180656" [ 2356.788143] env[62875]: _type = "Task" [ 2356.788143] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.795250] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180656, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.037511] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180652, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.122707] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9b3628be-b8a3-4105-bc84-088dede23aaf] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2357.187703] env[62875]: DEBUG nova.compute.utils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2357.189033] env[62875]: DEBUG nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2357.189216] env[62875]: DEBUG nova.network.neutron [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2357.246727] env[62875]: DEBUG nova.policy [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51e043294f27400c950e7391138ee3c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '260dc67f6a6343b4a0b7413d8f36294f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2357.264256] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.42212} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.264840] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 3eef1182-4159-4d57-8e6d-c5a1a50315f4/3eef1182-4159-4d57-8e6d-c5a1a50315f4.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2357.265071] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2357.265321] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e9f467c-0aec-4e85-b5ce-842e653b434e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.271223] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2357.271223] env[62875]: value = "task-2180657" [ 2357.271223] env[62875]: _type = "Task" [ 2357.271223] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2357.279598] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180657, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.296785] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180656, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.514760] env[62875]: DEBUG nova.network.neutron [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updated VIF entry in instance network info cache for port ad23a606-6c6c-449c-8874-c8f7ed1cb657. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2357.515241] env[62875]: DEBUG nova.network.neutron [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updating instance_info_cache with network_info: [{"id": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "address": "fa:16:3e:a9:13:20", "network": {"id": "a3bcd1a8-4b9a-4acb-a4bc-4b807b4eda0b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1163142763-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4d3024874d9450e82c83dc6ccc591e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad23a606-6c", "ovs_interfaceid": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2357.539925] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180652, 'name': CreateVM_Task, 'duration_secs': 0.596005} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.539925] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2357.539925] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'delete_on_termination': True, 'attachment_id': '3a0caaa0-5cad-4792-bf3f-2bec2bda02ea', 'disk_bus': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445006', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'name': 'volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '760f10ab-4617-418b-b922-4f9835eb96f4', 'attached_at': '', 'detached_at': '', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'serial': '80b0118f-e6e2-416e-bd9d-2cf922e866bc'}, 'mount_device': '/dev/sda', 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62875) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2357.539925] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Root volume attach. Driver type: vmdk {{(pid=62875) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2357.539925] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1ae2a9-5e90-4c90-b2da-10b372154929 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.548332] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bdff7a-588d-4bbc-bd4e-e898ecf71a38 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.553982] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cce4b8-6577-48f4-b5a6-6a7de27de6d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.560954] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-46f9fb16-219c-4969-a2c5-1ff133b184e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.567990] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2357.567990] env[62875]: value = "task-2180658" [ 2357.567990] env[62875]: _type = "Task" [ 2357.567990] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2357.575048] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180658, 'name': RelocateVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.626145] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 36c909f2-5d06-4a3e-ace2-15d2e36b4a95] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2357.689177] env[62875]: DEBUG nova.network.neutron [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Successfully created port: e3179ec1-3b91-475e-9899-690e70dbe3bd {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2357.691545] env[62875]: DEBUG nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2357.781085] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180657, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063987} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.781372] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2357.782160] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794e2b83-bdc5-4519-9e90-2378129e1898 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.804600] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 3eef1182-4159-4d57-8e6d-c5a1a50315f4/3eef1182-4159-4d57-8e6d-c5a1a50315f4.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2357.809555] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0e02d5b-1757-4fdc-b45c-872375608707 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.829704] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180656, 'name': CreateVM_Task, 'duration_secs': 0.526346} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.830902] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2357.831157] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2357.831157] env[62875]: value = "task-2180659" [ 2357.831157] env[62875]: _type = "Task" [ 2357.831157] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2357.831808] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2357.831956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2357.832275] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2357.832565] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ce36fe5-34f6-41a8-b188-de01d8c01268 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.842567] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180659, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.843461] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2357.843461] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5256f789-469c-0a0c-ba36-f899925f6dca" [ 2357.843461] env[62875]: _type = "Task" [ 2357.843461] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2357.851471] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5256f789-469c-0a0c-ba36-f899925f6dca, 'name': SearchDatastore_Task, 'duration_secs': 0.008718} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.851764] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2357.852076] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2357.852334] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2357.852566] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2357.852792] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2357.853060] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ae1131f-31bc-4bdf-9221-b9d1f7a1bdb8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.861633] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2357.861825] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2357.862619] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d20f67fb-1a2c-4386-ad93-f6ccdb30f50d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.867778] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2357.867778] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5279a05a-8639-56ce-2c0f-143b9e8baea2" [ 2357.867778] env[62875]: _type = "Task" [ 2357.867778] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2357.874964] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5279a05a-8639-56ce-2c0f-143b9e8baea2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.018827] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Releasing lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2358.018827] env[62875]: DEBUG nova.compute.manager [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Received event network-vif-plugged-98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2358.018967] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Acquiring lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2358.019172] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2358.019340] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2358.019511] env[62875]: DEBUG nova.compute.manager [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] No waiting events found dispatching network-vif-plugged-98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2358.019675] env[62875]: WARNING nova.compute.manager [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Received unexpected event network-vif-plugged-98207918-3b84-489b-95cc-a52189a1e220 for instance with vm_state building and task_state spawning. [ 2358.019836] env[62875]: DEBUG nova.compute.manager [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Received event network-changed-98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2358.019986] env[62875]: DEBUG nova.compute.manager [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Refreshing instance network info cache due to event network-changed-98207918-3b84-489b-95cc-a52189a1e220. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2358.020182] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Acquiring lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2358.020318] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Acquired lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2358.020472] env[62875]: DEBUG nova.network.neutron [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Refreshing network info cache for port 98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2358.080493] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180658, 'name': RelocateVM_Task, 'duration_secs': 0.385329} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2358.080729] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Volume attach. 
Driver type: vmdk {{(pid=62875) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2358.080931] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445006', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'name': 'volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '760f10ab-4617-418b-b922-4f9835eb96f4', 'attached_at': '', 'detached_at': '', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'serial': '80b0118f-e6e2-416e-bd9d-2cf922e866bc'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2358.081747] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7d3ba0-ee40-4ce2-9ae9-560409e77883 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.097676] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1557754-cceb-4a32-980b-d6d4af1d54bc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.118627] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc/volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2358.118860] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdafe990-b1f7-49af-8094-665a3fa4832d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.132628] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: f629aa16-0442-4659-9a9c-30f10136ae84] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2358.139277] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2358.139277] env[62875]: value = "task-2180660" [ 2358.139277] env[62875]: _type = "Task" [ 2358.139277] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.147152] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180660, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.343172] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180659, 'name': ReconfigVM_Task, 'duration_secs': 0.274505} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2358.343580] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 3eef1182-4159-4d57-8e6d-c5a1a50315f4/3eef1182-4159-4d57-8e6d-c5a1a50315f4.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2358.344245] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ee780ef-84e1-4494-a63b-08c1fd69860f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.350129] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2358.350129] env[62875]: value = "task-2180661" [ 2358.350129] env[62875]: _type = "Task" [ 2358.350129] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.358011] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180661, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.377204] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5279a05a-8639-56ce-2c0f-143b9e8baea2, 'name': SearchDatastore_Task, 'duration_secs': 0.00957} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2358.377931] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dba07d7-c450-4b03-bbe2-ec7b309cb53f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.382618] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2358.382618] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529c3403-b7cc-28c9-0fc9-74a3217bf465" [ 2358.382618] env[62875]: _type = "Task" [ 2358.382618] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.391673] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529c3403-b7cc-28c9-0fc9-74a3217bf465, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.635518] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: d751c6df-1e27-4b6a-a88a-cd15456914a1] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2358.648661] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180660, 'name': ReconfigVM_Task, 'duration_secs': 0.243895} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2358.651170] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Reconfigured VM instance instance-00000060 to attach disk [datastore2] volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc/volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2358.655950] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f197984-b320-4334-9753-e09391608e2f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.671629] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2358.671629] env[62875]: value = "task-2180662" [ 2358.671629] env[62875]: _type = "Task" [ 2358.671629] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.679523] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180662, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.700277] env[62875]: DEBUG nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2358.726368] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2358.726625] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2358.726786] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2358.727181] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2358.727181] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2358.727271] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2358.727459] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2358.727615] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2358.727783] env[62875]: DEBUG nova.virt.hardware [None 
req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2358.727944] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2358.728142] env[62875]: DEBUG nova.virt.hardware [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2358.729057] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10522358-eda7-418a-a1cd-250f670896e6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.737487] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c900ec1a-21fe-4187-9566-8ded2a4837e6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.768401] env[62875]: DEBUG nova.network.neutron [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Updated VIF entry in instance network info cache for port 98207918-3b84-489b-95cc-a52189a1e220. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2358.768401] env[62875]: DEBUG nova.network.neutron [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Updating instance_info_cache with network_info: [{"id": "98207918-3b84-489b-95cc-a52189a1e220", "address": "fa:16:3e:9f:79:cf", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98207918-3b", "ovs_interfaceid": "98207918-3b84-489b-95cc-a52189a1e220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2358.859673] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180661, 'name': Rename_Task, 'duration_secs': 0.132403} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2358.860112] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2358.860367] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29d1443f-cee4-486b-9a55-0a4f05d81918 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.865986] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2358.865986] env[62875]: value = "task-2180663" [ 2358.865986] env[62875]: _type = "Task" [ 2358.865986] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.875245] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180663, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.892197] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529c3403-b7cc-28c9-0fc9-74a3217bf465, 'name': SearchDatastore_Task, 'duration_secs': 0.010824} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2358.892622] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2358.892884] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] b71c432b-80ec-4b08-a62a-b1d5ccc56f86/b71c432b-80ec-4b08-a62a-b1d5ccc56f86.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2358.893187] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f518fef-4d62-40f7-a376-3c85f6e0597c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.899920] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2358.899920] env[62875]: value = "task-2180664" [ 2358.899920] env[62875]: _type = "Task" [ 2358.899920] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.908540] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180664, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2359.139149] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: c4b43f9a-9c49-4281-a102-5d34f26cc9df] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2359.144617] env[62875]: DEBUG nova.compute.manager [req-128a736c-b81c-43e7-a93d-22b0c4a2aeae req-0b602007-47c5-4ee2-b1df-2efd3f3a5548 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Received event network-vif-plugged-e3179ec1-3b91-475e-9899-690e70dbe3bd {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2359.144842] env[62875]: DEBUG oslo_concurrency.lockutils [req-128a736c-b81c-43e7-a93d-22b0c4a2aeae req-0b602007-47c5-4ee2-b1df-2efd3f3a5548 service nova] Acquiring lock "f1207e40-9d37-4439-a684-fa30c26d088a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.145064] env[62875]: DEBUG oslo_concurrency.lockutils [req-128a736c-b81c-43e7-a93d-22b0c4a2aeae req-0b602007-47c5-4ee2-b1df-2efd3f3a5548 service nova] Lock "f1207e40-9d37-4439-a684-fa30c26d088a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.145233] env[62875]: DEBUG oslo_concurrency.lockutils [req-128a736c-b81c-43e7-a93d-22b0c4a2aeae req-0b602007-47c5-4ee2-b1df-2efd3f3a5548 service nova] Lock "f1207e40-9d37-4439-a684-fa30c26d088a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2359.145436] env[62875]: DEBUG nova.compute.manager [req-128a736c-b81c-43e7-a93d-22b0c4a2aeae req-0b602007-47c5-4ee2-b1df-2efd3f3a5548 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] No waiting events found dispatching network-vif-plugged-e3179ec1-3b91-475e-9899-690e70dbe3bd {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2359.145582] env[62875]: WARNING nova.compute.manager [req-128a736c-b81c-43e7-a93d-22b0c4a2aeae req-0b602007-47c5-4ee2-b1df-2efd3f3a5548 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Received unexpected event network-vif-plugged-e3179ec1-3b91-475e-9899-690e70dbe3bd for instance with vm_state building and task_state spawning. [ 2359.181956] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180662, 'name': ReconfigVM_Task, 'duration_secs': 0.129604} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2359.182293] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445006', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'name': 'volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '760f10ab-4617-418b-b922-4f9835eb96f4', 'attached_at': '', 'detached_at': '', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'serial': '80b0118f-e6e2-416e-bd9d-2cf922e866bc'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2359.182865] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4c5af8c-4a5a-4865-8a76-0c4cbf48f05b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.189744] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2359.189744] env[62875]: value = "task-2180665" [ 2359.189744] env[62875]: _type = "Task" [ 2359.189744] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2359.198656] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180665, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2359.247885] env[62875]: DEBUG nova.network.neutron [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Successfully updated port: e3179ec1-3b91-475e-9899-690e70dbe3bd {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2359.270446] env[62875]: DEBUG oslo_concurrency.lockutils [req-a5a50b70-1072-45e2-a790-55b5edb61c0b req-1e5430dc-68c8-45fe-92ce-c207fa77a843 service nova] Releasing lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2359.380136] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180663, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2359.411491] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180664, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2359.643180] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7b3b22c7-26e2-46bf-82b4-8a2b1e68d513] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2359.700372] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180665, 'name': Rename_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2359.755860] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "refresh_cache-f1207e40-9d37-4439-a684-fa30c26d088a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2359.756041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquired lock "refresh_cache-f1207e40-9d37-4439-a684-fa30c26d088a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2359.756267] env[62875]: DEBUG nova.network.neutron [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2359.876848] env[62875]: DEBUG oslo_vmware.api [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180663, 'name': PowerOnVM_Task, 'duration_secs': 0.740537} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2359.877116] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2359.877324] env[62875]: INFO nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Took 15.71 seconds to spawn the instance on the hypervisor. 
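The exchange above is the standard oslo.vmware create-and-poll cycle: each "Invoking <Object>.<Something>_Task" call returns a task moref (task-2180658, task-2180663, ...), wait_for_task registers interest in it, and _poll_task logs "progress is N%" until the task reports "completed successfully" with a duration_secs figure. A minimal sketch of driving that cycle from Python follows; the vCenter host, credentials, and the 'vm-123' moref are illustrative placeholders, not values taken from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details; task_poll_interval controls how often
    # the "progress is N%" lines above get emitted while a task is running.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api returns the task reference that appears in the log as
    # e.g. task-2180663; wait_for_task polls it until it completes
    # successfully or raises on task error.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)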
[ 2359.877502] env[62875]: DEBUG nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2359.878245] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17afdc70-f678-4aa5-ae47-92324fcfcc13 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.909787] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180664, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540618} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2359.910050] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] b71c432b-80ec-4b08-a62a-b1d5ccc56f86/b71c432b-80ec-4b08-a62a-b1d5ccc56f86.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2359.910265] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2359.910505] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87911bd7-148f-492a-81b6-ec880bd523ec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.917890] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2359.917890] env[62875]: value = "task-2180666" [ 2359.917890] env[62875]: _type = "Task" [ 2359.917890] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2359.926071] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180666, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.146448] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2a16938e-eeaa-430d-961b-4b060187ba99] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2360.201017] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180665, 'name': Rename_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.286573] env[62875]: DEBUG nova.network.neutron [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2360.396264] env[62875]: INFO nova.compute.manager [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Took 20.47 seconds to build instance. [ 2360.407953] env[62875]: DEBUG nova.network.neutron [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Updating instance_info_cache with network_info: [{"id": "e3179ec1-3b91-475e-9899-690e70dbe3bd", "address": "fa:16:3e:41:ac:58", "network": {"id": "48c2ab0b-e783-4d7d-bb6b-64c4022f2383", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1531442189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "260dc67f6a6343b4a0b7413d8f36294f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3179ec1-3b", "ovs_interfaceid": "e3179ec1-3b91-475e-9899-690e70dbe3bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2360.426967] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071163} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2360.427264] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2360.428040] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6863e66c-308d-410c-8041-464a7eeee278 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.449547] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] b71c432b-80ec-4b08-a62a-b1d5ccc56f86/b71c432b-80ec-4b08-a62a-b1d5ccc56f86.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2360.449794] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0528b6f-6a23-4a58-9692-3988226fd9d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.468600] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2360.468600] env[62875]: value = "task-2180667" [ 2360.468600] env[62875]: _type = "Task" [ 2360.468600] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.477672] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180667, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.649645] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 37493633-c100-44d8-b1a1-8d462733ba41] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2360.702229] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180665, 'name': Rename_Task, 'duration_secs': 1.22967} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2360.702512] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2360.702752] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70fcc4d7-7fbc-42d3-a51c-de15f8e4c461 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.708992] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2360.708992] env[62875]: value = "task-2180668" [ 2360.708992] env[62875]: _type = "Task" [ 2360.708992] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.716515] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180668, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.898295] env[62875]: DEBUG oslo_concurrency.lockutils [None req-18b99aeb-7965-4c9c-8609-018948e0ea26 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.984s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2360.910052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Releasing lock "refresh_cache-f1207e40-9d37-4439-a684-fa30c26d088a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2360.910417] env[62875]: DEBUG nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Instance network_info: |[{"id": "e3179ec1-3b91-475e-9899-690e70dbe3bd", "address": "fa:16:3e:41:ac:58", "network": {"id": "48c2ab0b-e783-4d7d-bb6b-64c4022f2383", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1531442189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "260dc67f6a6343b4a0b7413d8f36294f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": 
"nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3179ec1-3b", "ovs_interfaceid": "e3179ec1-3b91-475e-9899-690e70dbe3bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2360.910873] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:ac:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2c68e7-b690-42e2-9491-c3f9357cc66a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3179ec1-3b91-475e-9899-690e70dbe3bd', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2360.918397] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Creating folder: Project (260dc67f6a6343b4a0b7413d8f36294f). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2360.918703] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d56b6b8-3ebb-4299-99e8-73dea548d300 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.930410] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Created folder: Project (260dc67f6a6343b4a0b7413d8f36294f) in parent group-v444854. [ 2360.930601] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Creating folder: Instances. Parent ref: group-v445014. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2360.930844] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a4d942f-0e01-42b6-b2aa-acb7e06adcc7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.940974] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Created folder: Instances in parent group-v445014. [ 2360.941435] env[62875]: DEBUG oslo.service.loopingcall [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2360.941669] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2360.942026] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a286453e-7e48-48a9-8a65-4f06538015aa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.961485] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2360.961485] env[62875]: value = "task-2180671" [ 2360.961485] env[62875]: _type = "Task" [ 2360.961485] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.969125] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180671, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.976502] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180667, 'name': ReconfigVM_Task, 'duration_secs': 0.242478} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2360.976761] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Reconfigured VM instance instance-00000061 to attach disk [datastore2] b71c432b-80ec-4b08-a62a-b1d5ccc56f86/b71c432b-80ec-4b08-a62a-b1d5ccc56f86.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2360.977357] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-435a78cf-40b8-408c-a4fb-f6dc8863bd8e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.983507] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2360.983507] env[62875]: value = "task-2180672" [ 2360.983507] env[62875]: _type = "Task" [ 2360.983507] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.992627] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180672, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.153551] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: cb4941dc-1690-46b5-93f9-407198fc1332] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2361.209650] env[62875]: DEBUG nova.compute.manager [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Received event network-changed-e3179ec1-3b91-475e-9899-690e70dbe3bd {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2361.209853] env[62875]: DEBUG nova.compute.manager [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Refreshing instance network info cache due to event network-changed-e3179ec1-3b91-475e-9899-690e70dbe3bd. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2361.210081] env[62875]: DEBUG oslo_concurrency.lockutils [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] Acquiring lock "refresh_cache-f1207e40-9d37-4439-a684-fa30c26d088a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2361.210227] env[62875]: DEBUG oslo_concurrency.lockutils [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] Acquired lock "refresh_cache-f1207e40-9d37-4439-a684-fa30c26d088a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2361.210435] env[62875]: DEBUG nova.network.neutron [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Refreshing network info cache for port e3179ec1-3b91-475e-9899-690e70dbe3bd {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2361.220983] env[62875]: DEBUG oslo_vmware.api [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180668, 'name': PowerOnVM_Task, 'duration_secs': 0.453909} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2361.221699] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2361.221937] env[62875]: INFO nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Took 13.17 seconds to spawn the instance on the hypervisor. 
[ 2361.222138] env[62875]: DEBUG nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2361.222891] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0ecfa2-5ff9-448b-96e0-a1421932be8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.472746] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180671, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.492755] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180672, 'name': Rename_Task, 'duration_secs': 0.137594} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2361.492992] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2361.493119] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0bf8c9d-2a9c-4702-97cb-d1a913af1397 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.500117] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2361.500117] env[62875]: value = "task-2180673" [ 2361.500117] env[62875]: _type = "Task" [ 2361.500117] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.507763] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.657094] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 5224c475-8739-4137-82e7-c9d149d41d61] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2361.743321] env[62875]: INFO nova.compute.manager [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Took 21.60 seconds to build instance. [ 2361.913180] env[62875]: DEBUG nova.network.neutron [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Updated VIF entry in instance network info cache for port e3179ec1-3b91-475e-9899-690e70dbe3bd. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2361.913572] env[62875]: DEBUG nova.network.neutron [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Updating instance_info_cache with network_info: [{"id": "e3179ec1-3b91-475e-9899-690e70dbe3bd", "address": "fa:16:3e:41:ac:58", "network": {"id": "48c2ab0b-e783-4d7d-bb6b-64c4022f2383", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1531442189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "260dc67f6a6343b4a0b7413d8f36294f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3179ec1-3b", "ovs_interfaceid": "e3179ec1-3b91-475e-9899-690e70dbe3bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2361.972402] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180671, 'name': CreateVM_Task, 'duration_secs': 0.698393} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2361.972623] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2361.973332] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2361.973646] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2361.974043] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2361.974310] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66b7edcf-391c-4b52-b2a6-738d3a993511 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.979303] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2361.979303] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526d17f6-e84e-77aa-e81a-1053a15784ba" [ 2361.979303] env[62875]: _type = "Task" [ 2361.979303] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.986938] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526d17f6-e84e-77aa-e81a-1053a15784ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.009305] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180673, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.160872] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 85be399c-2482-4a19-b68f-b45aa4e6846b] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2362.247712] env[62875]: DEBUG oslo_concurrency.lockutils [None req-21233133-f842-4e69-b456-c462342b61d0 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "760f10ab-4617-418b-b922-4f9835eb96f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.110s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.416934] env[62875]: DEBUG oslo_concurrency.lockutils [req-49633ad5-c105-4474-a587-56572d54c73c req-4792d954-1fd2-402d-8399-7367d663cd76 service nova] Releasing lock "refresh_cache-f1207e40-9d37-4439-a684-fa30c26d088a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2362.490039] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526d17f6-e84e-77aa-e81a-1053a15784ba, 'name': SearchDatastore_Task, 'duration_secs': 0.019595} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2362.490039] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2362.491040] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2362.491040] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2362.491040] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2362.491040] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2362.491234] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f52779cf-fb06-406e-8eb9-1c201269bf55 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.499514] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2362.499623] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2362.500373] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbac8821-a930-45b9-8f86-a05ee8024f3d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.508643] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2362.508643] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528e71bc-8b43-de7a-51bf-734d7a269071" [ 2362.508643] env[62875]: _type = "Task" [ 2362.508643] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2362.511666] env[62875]: DEBUG oslo_vmware.api [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180673, 'name': PowerOnVM_Task, 'duration_secs': 0.792705} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2362.514563] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2362.514811] env[62875]: INFO nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Took 10.68 seconds to spawn the instance on the hypervisor. [ 2362.515038] env[62875]: DEBUG nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2362.515776] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d48f1e-a92b-491e-ae42-b7182a6433db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.522822] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528e71bc-8b43-de7a-51bf-734d7a269071, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.664745] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 79afdeda-8a95-4ad4-ba10-0424cedf1d6f] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2363.022020] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528e71bc-8b43-de7a-51bf-734d7a269071, 'name': SearchDatastore_Task, 'duration_secs': 0.024174} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2363.022867] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-814f2318-e6ba-4994-919c-58a7d868d174 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.028086] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2363.028086] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521d67bc-30a2-82be-1e9d-d6bd1e57653b" [ 2363.028086] env[62875]: _type = "Task" [ 2363.028086] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2363.037713] env[62875]: INFO nova.compute.manager [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Took 20.28 seconds to build instance. [ 2363.041968] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521d67bc-30a2-82be-1e9d-d6bd1e57653b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2363.168287] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2dd748c2-048d-4450-a393-995249a9deb8] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2363.242140] env[62875]: DEBUG nova.compute.manager [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Received event network-changed-ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2363.242471] env[62875]: DEBUG nova.compute.manager [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Refreshing instance network info cache due to event network-changed-ab61f4d3-e2ed-435c-aee6-f21043aa9660. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2363.242804] env[62875]: DEBUG oslo_concurrency.lockutils [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] Acquiring lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2363.243055] env[62875]: DEBUG oslo_concurrency.lockutils [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] Acquired lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2363.243324] env[62875]: DEBUG nova.network.neutron [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Refreshing network info cache for port ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2363.540049] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6be87dc1-5092-491e-a322-bcc3c9eca7f6 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.789s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.540049] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521d67bc-30a2-82be-1e9d-d6bd1e57653b, 'name': SearchDatastore_Task, 'duration_secs': 0.029611} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2363.540049] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2363.540307] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] f1207e40-9d37-4439-a684-fa30c26d088a/f1207e40-9d37-4439-a684-fa30c26d088a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2363.540739] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cb34e0b-5ae3-4977-8f31-16abc9c15ae0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.547655] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2363.547655] env[62875]: value = "task-2180674" [ 2363.547655] env[62875]: _type = "Task" [ 2363.547655] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2363.555713] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180674, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2363.672792] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 3a1fbcc7-ad2c-4b10-8d3c-ad56fdc6a3cf] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2364.057554] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450366} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2364.057816] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] f1207e40-9d37-4439-a684-fa30c26d088a/f1207e40-9d37-4439-a684-fa30c26d088a.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2364.058044] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2364.058299] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-caec314b-0c54-4352-84d1-e43e4a8db6d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.065577] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2364.065577] env[62875]: value = "task-2180675" [ 2364.065577] env[62875]: _type = "Task" [ 2364.065577] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2364.073309] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180675, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2364.176712] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: d40aaba6-020d-45b9-83e7-8d7fe382b20f] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2364.211157] env[62875]: DEBUG nova.network.neutron [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updated VIF entry in instance network info cache for port ab61f4d3-e2ed-435c-aee6-f21043aa9660. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2364.212301] env[62875]: DEBUG nova.network.neutron [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updating instance_info_cache with network_info: [{"id": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "address": "fa:16:3e:d7:6b:a0", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab61f4d3-e2", "ovs_interfaceid": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2364.575252] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064115} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2364.575516] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2364.576275] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454661f6-03ee-4521-9571-915561ee01eb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.597460] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] f1207e40-9d37-4439-a684-fa30c26d088a/f1207e40-9d37-4439-a684-fa30c26d088a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2364.598061] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-661bbd6b-726d-45ad-b4ef-663eccb4743a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.616760] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2364.616760] env[62875]: value = "task-2180676" [ 2364.616760] env[62875]: _type = "Task" [ 2364.616760] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2364.623948] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180676, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2364.684963] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 85f7c7dc-03c4-44ff-8502-cf61ee7c3af9] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2364.714262] env[62875]: DEBUG oslo_concurrency.lockutils [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] Releasing lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2364.714714] env[62875]: DEBUG nova.compute.manager [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Received event network-changed-ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2364.715054] env[62875]: DEBUG nova.compute.manager [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Refreshing instance network info cache due to event network-changed-ad23a606-6c6c-449c-8874-c8f7ed1cb657. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2364.715452] env[62875]: DEBUG oslo_concurrency.lockutils [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] Acquiring lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2364.715730] env[62875]: DEBUG oslo_concurrency.lockutils [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] Acquired lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2364.716051] env[62875]: DEBUG nova.network.neutron [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Refreshing network info cache for port ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2365.129115] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180676, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.188917] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 346f4371-3029-4710-9163-08cf36196207] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2365.267298] env[62875]: DEBUG nova.compute.manager [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Received event network-changed-98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2365.267943] env[62875]: DEBUG nova.compute.manager [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Refreshing instance network info cache due to event network-changed-98207918-3b84-489b-95cc-a52189a1e220. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2365.268344] env[62875]: DEBUG oslo_concurrency.lockutils [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] Acquiring lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2365.268503] env[62875]: DEBUG oslo_concurrency.lockutils [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] Acquired lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2365.268675] env[62875]: DEBUG nova.network.neutron [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Refreshing network info cache for port 98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2365.462400] env[62875]: DEBUG nova.network.neutron [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updated VIF entry in instance network info cache for port ad23a606-6c6c-449c-8874-c8f7ed1cb657. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2365.462793] env[62875]: DEBUG nova.network.neutron [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updating instance_info_cache with network_info: [{"id": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "address": "fa:16:3e:a9:13:20", "network": {"id": "a3bcd1a8-4b9a-4acb-a4bc-4b807b4eda0b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1163142763-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4d3024874d9450e82c83dc6ccc591e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad23a606-6c", "ovs_interfaceid": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2365.627213] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180676, 'name': ReconfigVM_Task, 'duration_secs': 0.983787} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2365.627520] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Reconfigured VM instance instance-00000062 to attach disk [datastore2] f1207e40-9d37-4439-a684-fa30c26d088a/f1207e40-9d37-4439-a684-fa30c26d088a.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2365.628174] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8442b703-91db-4ae1-94e2-588dac6dedff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.634517] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2365.634517] env[62875]: value = "task-2180677" [ 2365.634517] env[62875]: _type = "Task" [ 2365.634517] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2365.642843] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180677, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.693806] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9dd30ca8-bf15-4a87-b055-3575445f4b79] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2365.965834] env[62875]: DEBUG oslo_concurrency.lockutils [req-34d287ad-618a-4c89-a46e-901214a63a58 req-c4de8cbb-6c29-4427-ba4a-a321dad6944b service nova] Releasing lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2365.971369] env[62875]: DEBUG nova.network.neutron [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Updated VIF entry in instance network info cache for port 98207918-3b84-489b-95cc-a52189a1e220. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2365.971784] env[62875]: DEBUG nova.network.neutron [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Updating instance_info_cache with network_info: [{"id": "98207918-3b84-489b-95cc-a52189a1e220", "address": "fa:16:3e:9f:79:cf", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98207918-3b", "ovs_interfaceid": "98207918-3b84-489b-95cc-a52189a1e220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2366.145406] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180677, 'name': Rename_Task, 'duration_secs': 0.124151} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2366.145687] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2366.145920] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0eef90da-82cc-458e-8750-e2f08d03552a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.152134] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2366.152134] env[62875]: value = "task-2180678" [ 2366.152134] env[62875]: _type = "Task" [ 2366.152134] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2366.159692] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2366.197542] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 380229e2-25ba-47cb-a6ca-167b9d9672eb] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2366.474125] env[62875]: DEBUG oslo_concurrency.lockutils [req-4823bb92-1bb1-444c-b888-3d79405a26ea req-613c00d5-b81e-48d6-bb18-8e6b79257554 service nova] Releasing lock "refresh_cache-b71c432b-80ec-4b08-a62a-b1d5ccc56f86" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2366.662292] env[62875]: DEBUG oslo_vmware.api [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180678, 'name': PowerOnVM_Task, 'duration_secs': 0.436478} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2366.662556] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2366.662760] env[62875]: INFO nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Took 7.96 seconds to spawn the instance on the hypervisor. 
[ 2366.662935] env[62875]: DEBUG nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2366.663749] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40602ada-6d77-477b-84b3-fdbd9962ad6d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.700999] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7f16b893-02e4-4395-b787-f82bc4549e4a] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2367.181681] env[62875]: INFO nova.compute.manager [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Took 12.70 seconds to build instance. [ 2367.203517] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 8f817564-b224-4dcb-bd8c-4d63509a5628] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2367.684444] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3dc14fce-fc8f-4464-bcdf-98cb53b18e65 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "f1207e40-9d37-4439-a684-fa30c26d088a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.211s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2367.707195] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 44a248f6-443c-4b7c-95f0-088f0cdb924d] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2368.209943] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: e811f624-2dda-468c-ab28-9744c300eb1d] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2368.714873] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 76a058aa-9fdf-4a3d-9d1b-a50bb9f61286] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2368.901937] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "f1207e40-9d37-4439-a684-fa30c26d088a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2368.902235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "f1207e40-9d37-4439-a684-fa30c26d088a" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.902455] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "f1207e40-9d37-4439-a684-fa30c26d088a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2368.902645] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "f1207e40-9d37-4439-a684-fa30c26d088a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.902875] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "f1207e40-9d37-4439-a684-fa30c26d088a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2368.905228] env[62875]: INFO nova.compute.manager [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Terminating instance [ 2369.218423] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a19f5bee-ece8-4aa3-8c33-9474da385238] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2369.409742] env[62875]: DEBUG nova.compute.manager [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2369.409981] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2369.410938] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12293012-cb8a-45c7-a7fb-a6ff63b2976d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.418956] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2369.419203] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bafaaffc-f8a8-42eb-8032-4e56b9a11946 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.425953] env[62875]: DEBUG oslo_vmware.api [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2369.425953] env[62875]: value = "task-2180679" [ 2369.425953] env[62875]: _type = "Task" [ 2369.425953] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2369.434662] env[62875]: DEBUG oslo_vmware.api [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2369.722347] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 737c68b0-9ccf-4e0b-a46d-aa78f7981c3d] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2369.935696] env[62875]: DEBUG oslo_vmware.api [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180679, 'name': PowerOffVM_Task, 'duration_secs': 0.18145} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2369.936015] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2369.936319] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2369.936665] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b67a8d0c-3886-4927-9446-1e1994549d54 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.225701] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 45403db3-ff20-42d3-8a37-8db671d8c1fa] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2370.256095] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2370.256318] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2370.256499] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Deleting the datastore file [datastore2] f1207e40-9d37-4439-a684-fa30c26d088a {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2370.256749] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3896d610-3355-46ed-bc93-b09855d6d07f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.263478] env[62875]: DEBUG oslo_vmware.api [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for the task: (returnval){ [ 2370.263478] env[62875]: value = "task-2180681" [ 2370.263478] env[62875]: _type = "Task" [ 2370.263478] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2370.271170] env[62875]: DEBUG oslo_vmware.api [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2370.728968] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 1230e54c-701a-4406-95bd-14e32914bc8d] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2370.773380] env[62875]: DEBUG oslo_vmware.api [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Task: {'id': task-2180681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330358} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2370.773648] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2370.773846] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2370.774032] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2370.774208] env[62875]: INFO nova.compute.manager [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Took 1.36 seconds to destroy the instance on the hypervisor. [ 2370.774440] env[62875]: DEBUG oslo.service.loopingcall [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2370.774650] env[62875]: DEBUG nova.compute.manager [-] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2370.774729] env[62875]: DEBUG nova.network.neutron [-] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2371.002396] env[62875]: DEBUG nova.compute.manager [req-5a6aca67-8c23-49e6-8d45-f3acffea4ab6 req-b2cd8e98-96ef-4778-bbdc-04af4cffb384 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Received event network-vif-deleted-e3179ec1-3b91-475e-9899-690e70dbe3bd {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2371.002607] env[62875]: INFO nova.compute.manager [req-5a6aca67-8c23-49e6-8d45-f3acffea4ab6 req-b2cd8e98-96ef-4778-bbdc-04af4cffb384 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Neutron deleted interface e3179ec1-3b91-475e-9899-690e70dbe3bd; detaching it from the instance and deleting it from the info cache [ 2371.002784] env[62875]: DEBUG nova.network.neutron [req-5a6aca67-8c23-49e6-8d45-f3acffea4ab6 req-b2cd8e98-96ef-4778-bbdc-04af4cffb384 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2371.232272] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 409b6902-f9ef-469b-a9db-4e93f764d199] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2371.487874] env[62875]: DEBUG nova.network.neutron [-] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2371.505804] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd60324c-52a4-4957-b216-fbbf0b46908a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.515384] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c414a597-d7d5-4302-8dfd-d3c2263575c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.541763] env[62875]: DEBUG nova.compute.manager [req-5a6aca67-8c23-49e6-8d45-f3acffea4ab6 req-b2cd8e98-96ef-4778-bbdc-04af4cffb384 service nova] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Detach interface failed, port_id=e3179ec1-3b91-475e-9899-690e70dbe3bd, reason: Instance f1207e40-9d37-4439-a684-fa30c26d088a could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2371.736119] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: c217e435-c5d8-406b-99ee-ec71580fb344] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2371.990963] env[62875]: INFO nova.compute.manager [-] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Took 1.22 seconds to deallocate network for instance. 
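
Annotation (not part of the captured log): the records from [ 2368.902455] through [ 2371.990963] trace one complete terminate_instance flow for instance f1207e40-9d37-4439-a684-fa30c26d088a: take the per-instance lock, power off the VM (PowerOffVM_Task), unregister it, delete its datastore directory (DeleteDatastoreFile_Task), then deallocate its Neutron ports. The repeated "Task: {...} progress is 0%" DEBUG lines come from oslo.vmware polling the vCenter task object until it reaches a terminal state. Below is a minimal Python sketch of that polling pattern; TaskInfo is an assumed simplification of the vSphere TaskInfo data, and wait_for_task here is a hypothetical stand-in, not the real oslo_vmware.api.VMwareAPISession.wait_for_task implementation.

    # Illustrative sketch of vSphere-style task polling (assumed shapes,
    # not the actual oslo.vmware code).
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:            # simplified stand-in for vim.TaskInfo
        state: str             # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str = ""

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        """Poll `poll()` (which returns a TaskInfo) until the task
        succeeds, errors out, or the timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError("task failed: %s" % info.error)
            # corresponds to the "... progress is N%" DEBUG lines in the log
            print("progress is %d%%" % info.progress)
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)
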
[ 2372.239449] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 70547fbd-7ce8-466e-8abc-b490b8dd6b28] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2372.497729] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2372.498021] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.498253] env[62875]: DEBUG nova.objects.instance [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lazy-loading 'resources' on Instance uuid f1207e40-9d37-4439-a684-fa30c26d088a {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2372.743414] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 9e0aaea6-96cf-494d-9f70-a709a47f9772] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2373.078653] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e344934c-63f5-4ac6-abc0-8a0f208af01c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.086572] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277087ce-c25d-45dd-b4f6-ae0727337c74 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.117292] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0559ae35-4f61-4131-ad29-6ff9931226ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.124415] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd34bae5-69e5-45e3-b1e6-becff3cb5e21 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.137164] env[62875]: DEBUG nova.compute.provider_tree [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2373.247134] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2cf54268-5499-49c9-8029-68b3866581d0] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2373.657087] env[62875]: ERROR nova.scheduler.client.report [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] [req-1d3bcf2e-e260-4f97-8893-ae99d047c421] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1d3bcf2e-e260-4f97-8893-ae99d047c421"}]} [ 2373.672911] env[62875]: DEBUG nova.scheduler.client.report [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2373.687020] env[62875]: DEBUG nova.scheduler.client.report [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2373.687253] env[62875]: DEBUG nova.compute.provider_tree [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2373.697137] env[62875]: DEBUG nova.scheduler.client.report [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2373.713097] env[62875]: DEBUG nova.scheduler.client.report [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2373.750203] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 816e0ecb-6476-49bb-9fea-a01067f25b51] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2373.782225] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2974a7-d5c1-4165-8e10-f067022d1dcb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.789645] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f523e812-9168-4aee-8c2d-6e8628d04314 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.820015] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ed6440-063f-4ebc-8d84-50ac47b00db3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.826850] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b938e6f-6fcf-4ba9-b9cf-810eb2edd6e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.839456] env[62875]: DEBUG nova.compute.provider_tree [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2374.253634] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2106a09b-554e-41dd-aa3a-c190b62d0afc] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2374.369841] env[62875]: DEBUG nova.scheduler.client.report [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2374.370129] env[62875]: DEBUG nova.compute.provider_tree [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 119 to 120 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2374.370316] env[62875]: DEBUG nova.compute.provider_tree [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2374.757071] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 8361611a-ad16-43ef-94e0-f2e7e9851682] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2374.875115] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.377s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.898127] env[62875]: INFO nova.scheduler.client.report [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Deleted allocations for instance f1207e40-9d37-4439-a684-fa30c26d088a [ 2375.260048] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: d0c4095f-2d78-4055-b568-7e70e7c4c182] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2375.406041] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f180bc1a-60c9-438b-8664-f72bb83ca784 tempest-ServerMetadataTestJSON-424102549 tempest-ServerMetadataTestJSON-424102549-project-member] Lock "f1207e40-9d37-4439-a684-fa30c26d088a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.504s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.763056] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 37ae8e69-f953-4846-8a21-fed697ea575a] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2376.266817] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance:
c1e107cd-5c03-405f-bdae-3281dc4844d5] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2376.771157] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 305aebbe-f983-4826-b8c0-9854458f7d48] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2377.274588] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a64253fe-4ba9-4686-810b-a26a4c29631b] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2377.779073] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: acc78084-21e8-456c-a573-fc5e931147c6] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2378.282394] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: c6de797f-03f7-4dca-9c6a-e7b840990be6] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2378.776024] env[62875]: INFO nova.compute.manager [None req-abed3d47-6685-456e-91be-4028295288bd tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Get console output [ 2378.776024] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-abed3d47-6685-456e-91be-4028295288bd tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] The console log is missing. 
Check your VSPC configuration [ 2378.785717] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 6f936641-750d-49ae-8beb-bca35305d10d] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2379.288419] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2379.288663] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 2379.792051] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2379.857230] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "7969485a-ccd6-48e0-bdea-b8920af28843" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.857477] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "7969485a-ccd6-48e0-bdea-b8920af28843" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.857687] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "7969485a-ccd6-48e0-bdea-b8920af28843-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.857876] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "7969485a-ccd6-48e0-bdea-b8920af28843-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.858059] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "7969485a-ccd6-48e0-bdea-b8920af28843-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.859979] env[62875]: INFO nova.compute.manager [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Terminating instance [ 2380.363814] env[62875]: DEBUG nova.compute.manager [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2380.364230] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2380.365614] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5edf57-3aa9-419f-b7ae-6bc87c2e5a6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.376422] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2380.376746] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e74443d-4c98-40df-ab05-17bdbd96fdbd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.383501] env[62875]: DEBUG oslo_vmware.api [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2380.383501] env[62875]: value = "task-2180682" [ 2380.383501] env[62875]: _type = "Task" [ 2380.383501] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2380.391316] env[62875]: DEBUG oslo_vmware.api [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2180682, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.893362] env[62875]: DEBUG oslo_vmware.api [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2180682, 'name': PowerOffVM_Task, 'duration_secs': 0.195664} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2380.893624] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2380.893829] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2380.894089] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a84c4e8a-84f5-4645-ab4f-15e9bb088ea5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.247992] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2381.248164] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2381.248303] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Deleting the datastore file [datastore2] 7969485a-ccd6-48e0-bdea-b8920af28843 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2381.248590] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df03085c-fc58-41ac-8576-9c64cbe359f1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.255757] env[62875]: DEBUG oslo_vmware.api [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for the task: (returnval){ [ 2381.255757] env[62875]: value = "task-2180684" [ 2381.255757] env[62875]: _type = "Task" [ 2381.255757] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.263411] env[62875]: DEBUG oslo_vmware.api [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2180684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.765964] env[62875]: DEBUG oslo_vmware.api [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Task: {'id': task-2180684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133819} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2381.766251] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2381.766488] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2381.766686] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2381.766862] env[62875]: INFO nova.compute.manager [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Took 1.40 seconds to destroy the instance on the hypervisor. [ 2381.767116] env[62875]: DEBUG oslo.service.loopingcall [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2381.767310] env[62875]: DEBUG nova.compute.manager [-] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2381.767405] env[62875]: DEBUG nova.network.neutron [-] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2382.179267] env[62875]: DEBUG nova.compute.manager [req-61c24b59-9b5f-4ba4-9979-bc12f256830d req-79218ef5-3d8a-4faa-8fc5-be1c498075bf service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Received event network-vif-deleted-6c87f73d-311a-4ed3-9d9f-5325a201e67e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2382.179267] env[62875]: INFO nova.compute.manager [req-61c24b59-9b5f-4ba4-9979-bc12f256830d req-79218ef5-3d8a-4faa-8fc5-be1c498075bf service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Neutron deleted interface 6c87f73d-311a-4ed3-9d9f-5325a201e67e; detaching it from the instance and deleting it from the info cache [ 2382.179267] env[62875]: DEBUG nova.network.neutron [req-61c24b59-9b5f-4ba4-9979-bc12f256830d req-79218ef5-3d8a-4faa-8fc5-be1c498075bf service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.288782] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.664055] env[62875]: DEBUG nova.network.neutron [-] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.683084] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bb64aef-b72b-41ba-aecc-0ad8fbe1d942 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.693192] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0aca17-f09f-4c25-a79f-3e4844307b8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.718348] env[62875]: DEBUG nova.compute.manager [req-61c24b59-9b5f-4ba4-9979-bc12f256830d req-79218ef5-3d8a-4faa-8fc5-be1c498075bf service nova] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Detach interface failed, port_id=6c87f73d-311a-4ed3-9d9f-5325a201e67e, reason: Instance 7969485a-ccd6-48e0-bdea-b8920af28843 could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2382.794581] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.795384] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Getting list of instances from cluster (obj){ [ 2382.795384] env[62875]: value = "domain-c8" [ 2382.795384] env[62875]: _type = "ClusterComputeResource" [ 2382.795384] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2382.796072] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde92f74-a0c9-49ca-a6a1-9fa1bf63d9c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.808673] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Got total of 4 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2383.166234] env[62875]: INFO nova.compute.manager [-] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Took 1.40 seconds to deallocate network for instance. [ 2383.673053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2383.673053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2383.673053] env[62875]: DEBUG nova.objects.instance [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lazy-loading 'resources' on Instance uuid 7969485a-ccd6-48e0-bdea-b8920af28843 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2384.249963] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dcc16f-4a45-4f51-8e63-17b5b602de51 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.258809] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0813814-8561-4e1c-8955-f2e27aae60a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.289484] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c113b190-87ab-458f-8cdf-0173686b0367 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.296822] env[62875]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b63f3a-773d-47a7-80b3-33b705808db6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.309599] env[62875]: DEBUG nova.compute.provider_tree [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2384.842995] env[62875]: DEBUG nova.scheduler.client.report [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2384.843281] env[62875]: DEBUG nova.compute.provider_tree [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 120 to 121 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2384.843462] env[62875]: DEBUG nova.compute.provider_tree [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2385.348688] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2385.366903] env[62875]: INFO nova.scheduler.client.report [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 
tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Deleted allocations for instance 7969485a-ccd6-48e0-bdea-b8920af28843 [ 2385.875262] env[62875]: DEBUG oslo_concurrency.lockutils [None req-12fa27f6-3246-48bc-ad61-6c9b0fb559b3 tempest-AttachInterfacesUnderV243Test-383624950 tempest-AttachInterfacesUnderV243Test-383624950-project-member] Lock "7969485a-ccd6-48e0-bdea-b8920af28843" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.018s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2389.222912] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.706518] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.706518] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2390.702629] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2390.706190] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2392.706103] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2393.927112] env[62875]: INFO nova.compute.manager [None req-2c4f1c5d-15fb-4230-abcf-52b762be8c82 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Get console output [ 2393.927393] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-2c4f1c5d-15fb-4230-abcf-52b762be8c82 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] The console log is missing.
Check your VSPC configuration [ 2394.999762] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2395.000091] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2395.000707] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2395.000707] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2395.000707] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2395.003538] env[62875]: INFO nova.compute.manager [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Terminating instance [ 2395.507024] env[62875]: DEBUG nova.compute.manager [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Start destroying the instance on the hypervisor.
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2395.507278] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2395.508345] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2950e0-9211-467b-acc3-408bb07fd447 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.516415] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2395.516640] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfb71ced-d03d-48dd-98d8-2098a745d616 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.523046] env[62875]: DEBUG oslo_vmware.api [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2395.523046] env[62875]: value = "task-2180685" [ 2395.523046] env[62875]: _type = "Task" [ 2395.523046] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2395.531271] env[62875]: DEBUG oslo_vmware.api [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2396.032511] env[62875]: DEBUG oslo_vmware.api [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180685, 'name': PowerOffVM_Task, 'duration_secs': 0.21245} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2396.032851] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2396.033098] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2396.033383] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-932001ce-a605-4597-92e8-388c5500d92d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.248198] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2396.248457] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2396.248641] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleting the datastore file [datastore1] 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2396.248902] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-762c7b67-ff48-4901-956f-991871fa478a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.256750] env[62875]: DEBUG oslo_vmware.api [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2396.256750] env[62875]: value = "task-2180687" [ 2396.256750] env[62875]: _type = "Task" [ 2396.256750] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2396.264160] env[62875]: DEBUG oslo_vmware.api [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180687, 'name': DeleteDatastoreFile_Task} progress is 0%. 
[ 2396.264160] env[62875]: DEBUG oslo_vmware.api [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180687, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2396.706739] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2396.769880] env[62875]: DEBUG oslo_vmware.api [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180687, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127419} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2396.770260] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2396.770550] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2396.770833] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2396.771133] env[62875]: INFO nova.compute.manager [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Took 1.26 seconds to destroy the instance on the hypervisor. [ 2396.771497] env[62875]: DEBUG oslo.service.loopingcall [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2396.771797] env[62875]: DEBUG nova.compute.manager [-] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2396.771953] env[62875]: DEBUG nova.network.neutron [-] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2397.230908] env[62875]: DEBUG nova.compute.manager [req-21aecea2-82cf-4cdf-99fe-967f020ce6a2 req-00225607-9e0f-4be7-9461-29ea8ba61a94 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Received event network-vif-deleted-8ba73130-211b-42d3-b2b1-b1b72ee95433 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2397.231287] env[62875]: INFO nova.compute.manager [req-21aecea2-82cf-4cdf-99fe-967f020ce6a2 req-00225607-9e0f-4be7-9461-29ea8ba61a94 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Neutron deleted interface 8ba73130-211b-42d3-b2b1-b1b72ee95433; detaching it from the instance and deleting it from the info cache [ 2397.231342] env[62875]: DEBUG nova.network.neutron [req-21aecea2-82cf-4cdf-99fe-967f020ce6a2 req-00225607-9e0f-4be7-9461-29ea8ba61a94 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2397.707079] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2397.714686] env[62875]: DEBUG nova.network.neutron [-] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2397.734790] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3941b9b-a94c-4fa9-93f6-2c1e9457f56d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.744579] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56fd905-f621-43d4-9bec-4a51d07dca52 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.768785] env[62875]: DEBUG nova.compute.manager [req-21aecea2-82cf-4cdf-99fe-967f020ce6a2 req-00225607-9e0f-4be7-9461-29ea8ba61a94 service nova] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Detach interface failed, port_id=8ba73130-211b-42d3-b2b1-b1b72ee95433, reason: Instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2398.217904] env[62875]: INFO nova.compute.manager [-] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Took 1.45 seconds to deallocate network for instance. 
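The loopingcall record above ("Waiting for function ... _deallocate_network_with_retries to return") shows Nova wrapping network deallocation in an oslo.service looping call so it can be retried: the Neutron port may already be mid-deletion when cleanup starts, which is why the "Detach interface failed ... could not be found" event in the same span is logged and tolerated. A hedged sketch of that retry shape using FixedIntervalLoopingCall; the real wrapper's interval and exception handling differ:

```python
# Hedged sketch of a retried cleanup step with oslo.service looping calls.
from oslo_service import loopingcall

def deallocate_network():
    # hypothetical stand-in for the real Neutron cleanup; returns True
    # once deallocation has succeeded
    return True

def _deallocate_network_with_retries():
    # raising LoopingCallDone stops the loop; otherwise it runs again
    if deallocate_network():
        raise loopingcall.LoopingCallDone()

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
timer.start(interval=2.0).wait()  # blocks until LoopingCallDone is raised
```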
[ 2398.724389] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2398.724744] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2398.724876] env[62875]: DEBUG nova.objects.instance [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lazy-loading 'resources' on Instance uuid 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2399.373440] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8849f5a5-5e48-4286-aba4-c5aca3fda6a6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.381182] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc8d3e8-8f3a-42d0-a35e-6af6bd7dab54 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.410343] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f2aba5-678b-446a-bef4-f22871683e7d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.417452] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21791428-143e-4b0b-b756-891baa2ced36 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.429935] env[62875]: DEBUG nova.compute.provider_tree [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2399.933826] env[62875]: DEBUG nova.scheduler.client.report [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2400.439266] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf 
tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2400.458701] env[62875]: INFO nova.scheduler.client.report [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleted allocations for instance 67ac6916-04f3-4eb8-b7da-37a5b28b50d9 [ 2400.969046] env[62875]: DEBUG oslo_concurrency.lockutils [None req-c0af4208-a1fa-4275-8b3f-d6b464276fcf tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "67ac6916-04f3-4eb8-b7da-37a5b28b50d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.969s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2401.706288] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2401.706481] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2403.054074] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.054357] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.214533] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 2403.214714] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
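The per-instance lock records above come from oslo.concurrency's lockutils, which serializes terminate/build work on one instance UUID and emits exactly these "Acquiring ... / acquired ... waited Ns / released ... held Ns" lines. A minimal sketch of the decorator pattern; the 'nova-' prefix mirrors Nova's convention, and the function body is purely illustrative:

```python
# Minimal sketch of per-UUID serialization with oslo.concurrency.
from oslo_concurrency import lockutils

# Nova-style helper: all locks from this helper share a common prefix.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('a5f9b278-6c02-4d5e-997a-97a8fa8944ca')
def _locked_do_build_and_run_instance():
    # only one thread may build or terminate this instance at a time;
    # lockutils logs the acquire/wait/hold timings seen in these records
    pass

_locked_do_build_and_run_instance()
```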
[ 2403.556301] env[62875]: DEBUG nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2403.717743] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.717972] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.718164] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.718325] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2403.719527] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2bd92d-205a-4727-8a7a-bcecb6120637 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.728200] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdc0e7a-30b1-45e8-961c-b9c76f7fd525 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.741565] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5c0db8-2b78-4bf2-9d11-bd54f154955c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.747941] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ffe2fd-5eb5-4b38-9e76-199048b84435 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.776824] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180491MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2403.776958] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.777165] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2404.076724] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2404.800879] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 3eef1182-4159-4d57-8e6d-c5a1a50315f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2404.801056] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 760f10ab-4617-418b-b922-4f9835eb96f4 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2404.801192] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance b71c432b-80ec-4b08-a62a-b1d5ccc56f86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.304378] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2405.304788] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2405.304788] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2405.357040] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96573f7a-ff26-45ef-a89c-c3a84dcb4d2c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.364218] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fbc1aa-cfe6-4ecc-a15a-c58f3a4f26e4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.393511] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156aef58-1fe8-428e-9fa8-549fcdca0dea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.400964] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048b244c-96bd-4041-8865-45056c401358 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.414813] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2405.917700] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2406.422879] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2406.423145] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.646s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2406.423459] env[62875]: DEBUG oslo_concurrency.lockutils [None 
req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.347s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.425085] env[62875]: INFO nova.compute.claims [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2407.487273] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2944940-cfaf-4331-84ee-7fd5fc7883bf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.494737] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1da8ca-6855-4963-bb04-a34ff289a760 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.523879] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4f2d4e-ba91-4162-8016-05b34d122000 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.530982] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7458645a-3367-4394-a85f-ab1909bf521b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.544703] env[62875]: DEBUG nova.compute.provider_tree [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2408.047790] env[62875]: DEBUG nova.scheduler.client.report [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2408.552789] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.129s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
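The repeated "Inventory has not changed" records reflect a simple structural comparison: the report client only pushes inventory to the placement service when the freshly computed dict differs from what the ProviderTree cache already holds. An illustration in plain Python, using the VCPU resource class from the data above; the dict shape matches the log, the control flow is a simplification:

```python
# Pure-Python illustration of the "Inventory has not changed" decision.
cached = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
}
computed = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
}

if computed == cached:
    print('Inventory has not changed for provider '
          '2d6e5fad-ed55-4f17-b68d-be9dae183a02')
else:
    pass  # here the new inventory would be PUT to the placement API
```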
[ 2408.553341] env[62875]: DEBUG nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2409.058981] env[62875]: DEBUG nova.compute.utils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2409.061025] env[62875]: DEBUG nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2409.061025] env[62875]: DEBUG nova.network.neutron [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2409.109179] env[62875]: DEBUG nova.policy [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86e44a4e203b49e09a8f9d2bb45b8079', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95d0f81815ea467cbc1c6160e27409fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2409.385941] env[62875]: DEBUG nova.network.neutron [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Successfully created port: 473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2409.564398] env[62875]: DEBUG nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2410.573977] env[62875]: DEBUG nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Start spawning the instance on the hypervisor.
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2410.598127] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2410.598338] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2410.598499] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2410.598682] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2410.598830] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2410.598975] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2410.599207] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2410.600023] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2410.600023] env[62875]: DEBUG 
nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2410.600023] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2410.600023] env[62875]: DEBUG nova.virt.hardware [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2410.600752] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0099b3e-42bf-422b-9f63-c025ff27d785 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.608850] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea99ecd-e6c0-4541-876b-02627329dce4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.748756] env[62875]: DEBUG nova.compute.manager [req-b001ed8f-b1f5-46d6-b82d-bda1409aef25 req-486418b1-0c69-45eb-b362-686e0e34ac07 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Received event network-vif-plugged-473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2410.749020] env[62875]: DEBUG oslo_concurrency.lockutils [req-b001ed8f-b1f5-46d6-b82d-bda1409aef25 req-486418b1-0c69-45eb-b362-686e0e34ac07 service nova] Acquiring lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2410.749209] env[62875]: DEBUG oslo_concurrency.lockutils [req-b001ed8f-b1f5-46d6-b82d-bda1409aef25 req-486418b1-0c69-45eb-b362-686e0e34ac07 service nova] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2410.749367] env[62875]: DEBUG oslo_concurrency.lockutils [req-b001ed8f-b1f5-46d6-b82d-bda1409aef25 req-486418b1-0c69-45eb-b362-686e0e34ac07 service nova] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2410.749537] env[62875]: DEBUG nova.compute.manager [req-b001ed8f-b1f5-46d6-b82d-bda1409aef25 req-486418b1-0c69-45eb-b362-686e0e34ac07 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] No waiting events found dispatching network-vif-plugged-473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
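The nova.virt.hardware records in this span enumerate every (sockets, cores, threads) triple whose product equals the flavor's vCPU count, bounded by the 65536 limits, then sort by preference; with one vCPU the only possibility is 1:1:1, as logged. A simplified stand-alone rendering of that enumeration (not Nova's exact code, which also weighs flavor and image preferences):

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product == vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(vcpus // s, max_cores) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log above
```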
[ 2410.749703] env[62875]: WARNING nova.compute.manager [req-b001ed8f-b1f5-46d6-b82d-bda1409aef25 req-486418b1-0c69-45eb-b362-686e0e34ac07 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Received unexpected event network-vif-plugged-473d8504-6b5c-40ab-becc-e8cc4c9ea748 for instance with vm_state building and task_state spawning. [ 2410.832861] env[62875]: DEBUG nova.network.neutron [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Successfully updated port: 473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2411.335235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2411.335392] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2411.335533] env[62875]: DEBUG nova.network.neutron [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2411.866796] env[62875]: DEBUG nova.network.neutron [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Instance cache missing network info.
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2411.987948] env[62875]: DEBUG nova.network.neutron [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [{"id": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "address": "fa:16:3e:9e:0c:34", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap473d8504-6b", "ovs_interfaceid": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2412.490831] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2412.491212] env[62875]: DEBUG nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Instance network_info: |[{"id": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "address": "fa:16:3e:9e:0c:34", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap473d8504-6b", "ovs_interfaceid": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2412.491659] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:0c:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '473d8504-6b5c-40ab-becc-e8cc4c9ea748', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2412.499427] env[62875]: DEBUG oslo.service.loopingcall [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2412.499639] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2412.500011] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af0b2d2f-fe5a-48ae-94bd-da8a10225465 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.521058] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2412.521058] env[62875]: value = "task-2180688" [ 2412.521058] env[62875]: _type = "Task" [ 2412.521058] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2412.528736] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180688, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.777523] env[62875]: DEBUG nova.compute.manager [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Received event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2412.777704] env[62875]: DEBUG nova.compute.manager [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing instance network info cache due to event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2412.777942] env[62875]: DEBUG oslo_concurrency.lockutils [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] Acquiring lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2412.778118] env[62875]: DEBUG oslo_concurrency.lockutils [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] Acquired lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2412.778298] env[62875]: DEBUG nova.network.neutron [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2413.031507] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180688, 'name': CreateVM_Task, 'duration_secs': 0.420802} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2413.031780] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2413.032360] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2413.032529] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2413.032860] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2413.033114] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d0e75c4-0ca2-4973-96d5-44c5b42d4506 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.037639] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2413.037639] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b40f3-5858-f066-3e0b-68fd663da8fc" [ 2413.037639] env[62875]: _type = "Task" [ 2413.037639] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2413.045271] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b40f3-5858-f066-3e0b-68fd663da8fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2413.456218] env[62875]: DEBUG nova.network.neutron [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updated VIF entry in instance network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2413.456594] env[62875]: DEBUG nova.network.neutron [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [{"id": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "address": "fa:16:3e:9e:0c:34", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap473d8504-6b", "ovs_interfaceid": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
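The "Acquiring/Acquired/Releasing lock "[datastore2] devstack-image-cache_base..."" records in this span guard the shared image cache, so concurrent spawns on the same host do not fetch or copy the same cached VMDK twice. A minimal sketch of that guard using lockutils' context manager; the lock name mirrors the log, and the body is hypothetical:

```python
# Minimal sketch of guarding a shared image-cache entry.
from oslo_concurrency import lockutils

cache_key = ('[datastore2] devstack-image-cache_base/'
             'a9637bcc-4de8-4ea1-be59-4c697becf2a7')

with lockutils.lock(cache_key):
    # hypothetical body: probe the datastore for the cached VMDK
    # (the SearchDatastore_Task above) and fetch it only if missing
    pass
```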
[ 2413.547631] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b40f3-5858-f066-3e0b-68fd663da8fc, 'name': SearchDatastore_Task, 'duration_secs': 0.012376} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2413.547920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2413.548164] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2413.548472] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2413.548651] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2413.548838] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2413.549098] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac86a851-7fef-46f1-b33b-be4768ae2307 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.556654] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2413.556827] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Folder [datastore2] devstack-image-cache_base created.
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2413.557516] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d477aa3a-28d1-4f3c-ae2a-a25cfa5e837f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.562317] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2413.562317] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5233c6d1-6dd9-07d8-0c36-14703c57f429" [ 2413.562317] env[62875]: _type = "Task" [ 2413.562317] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2413.569415] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5233c6d1-6dd9-07d8-0c36-14703c57f429, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2413.959931] env[62875]: DEBUG oslo_concurrency.lockutils [req-406b6ac5-6453-460b-8b63-cea223647209 req-07736a06-4494-4194-84d0-790982b55ce2 service nova] Releasing lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2414.072496] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5233c6d1-6dd9-07d8-0c36-14703c57f429, 'name': SearchDatastore_Task, 'duration_secs': 0.008463} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2414.073245] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1756a64-904b-40db-86be-a076e3003c93 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.078092] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2414.078092] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5289aa76-3b58-0da2-1a03-413ee5d44016" [ 2414.078092] env[62875]: _type = "Task" [ 2414.078092] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.085848] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5289aa76-3b58-0da2-1a03-413ee5d44016, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.589652] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5289aa76-3b58-0da2-1a03-413ee5d44016, 'name': SearchDatastore_Task, 'duration_secs': 0.009518} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2414.589913] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2414.590205] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] a5f9b278-6c02-4d5e-997a-97a8fa8944ca/a5f9b278-6c02-4d5e-997a-97a8fa8944ca.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2414.590471] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc1f3ccf-6b74-407b-91ba-32e70a25ce8d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.598317] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2414.598317] env[62875]: value = "task-2180689" [ 2414.598317] env[62875]: _type = "Task" [ 2414.598317] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.606658] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180689, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2415.107945] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180689, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
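The spawn path here copies the cached image VMDK to the instance directory and then extends the root disk to the flavor size (1048576 KB for this m1.nano flavor). A sketch of that two-step sequence through oslo.vmware, assuming a `session` built as in the earlier sketch; the VirtualDiskManager argument names follow the vSphere SDK (a datacenter argument may also be required) and are not verified against a live vCenter:

```python
# Assumes `session` from the earlier VMwareAPISession sketch.
vdm = session.vim.service_content.virtualDiskManager

# Copy the cached image VMDK into the instance's directory.
copy_task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore2] devstack-image-cache_base/'
               'a9637bcc-4de8-4ea1-be59-4c697becf2a7/'
               'a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk',
    destName='[datastore2] a5f9b278-6c02-4d5e-997a-97a8fa8944ca/'
             'a5f9b278-6c02-4d5e-997a-97a8fa8944ca.vmdk')
session.wait_for_task(copy_task)

# Grow the copied root disk to the flavor's size (in KB).
extend_task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', vdm,
    name='[datastore2] a5f9b278-6c02-4d5e-997a-97a8fa8944ca/'
         'a5f9b278-6c02-4d5e-997a-97a8fa8944ca.vmdk',
    newCapacityKb=1048576)
session.wait_for_task(extend_task)
```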
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2415.610588] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] a5f9b278-6c02-4d5e-997a-97a8fa8944ca/a5f9b278-6c02-4d5e-997a-97a8fa8944ca.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2415.610799] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2415.611064] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1477b135-29f0-4db6-a763-cdc94400cc85 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.618521] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2415.618521] env[62875]: value = "task-2180690" [ 2415.618521] env[62875]: _type = "Task" [ 2415.618521] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2415.628138] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180690, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.129122] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180690, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124757} completed successfully. 
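The figure in "Extending root virtual disk to 1048576" is a capacity in KB: 1048576 KB is 1 GiB, consistent with the root_gb=1 m1.nano flavor that appears later in this log. A sketch of the ExtendVirtualDisk_Task call, with the same caveats as the sketches above:

    def extend_virtual_disk(session, dc_ref, vmdk_path, new_capacity_kb):
        # newCapacityKb is in KB: 1048576 KB == 1 GiB, matching the log.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                  disk_mgr, name=vmdk_path,
                                  datacenter=dc_ref,
                                  newCapacityKb=new_capacity_kb,
                                  eagerZero=False)
        session.wait_for_task(task)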
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2416.129122] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2416.129690] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd45a4c2-e825-4b44-bcf5-36234f1c168a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.150809] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] a5f9b278-6c02-4d5e-997a-97a8fa8944ca/a5f9b278-6c02-4d5e-997a-97a8fa8944ca.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2416.151048] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fbcb4ca-c3d9-4602-a266-a550bbca84b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.169330] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2416.169330] env[62875]: value = "task-2180691" [ 2416.169330] env[62875]: _type = "Task" [ 2416.169330] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.176719] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180691, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.679210] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180691, 'name': ReconfigVM_Task, 'duration_secs': 0.266458} completed successfully. 
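The ReconfigVM_Task above attaches the copied vmdk by submitting a VirtualMachineConfigSpec with a single 'add' device change; "type sparse" in the message refers to the disk backing selected for the sparse cirros image. A heavily abbreviated sketch of building such a spec through the suds client factory (a real spec also carries capacity, sharing and controller/adapter details; controller_key assumes an already-existing controller):

    def attach_vmdk(session, vm_ref, vmdk_path, controller_key, unit_number,
                    disk_type='sparse'):
        cf = session.vim.client.factory
        backing_cls = ('ns0:VirtualDiskSparseVer2BackingInfo'
                       if disk_type == 'sparse'
                       else 'ns0:VirtualDiskFlatVer2BackingInfo')
        backing = cf.create(backing_cls)
        backing.fileName = vmdk_path
        backing.diskMode = 'persistent'
        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key  # assumes existing controller
        disk.unitNumber = unit_number
        disk.key = -100                      # negative key: new device
        dev_spec = cf.create('ns0:VirtualDeviceConfigSpec')
        dev_spec.operation = 'add'
        dev_spec.device = disk
        config_spec = cf.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [dev_spec]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)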
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2416.679480] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Reconfigured VM instance instance-00000063 to attach disk [datastore2] a5f9b278-6c02-4d5e-997a-97a8fa8944ca/a5f9b278-6c02-4d5e-997a-97a8fa8944ca.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2416.680102] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a5874de-62c5-4d0a-9c63-a802f254dfb2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.685760] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2416.685760] env[62875]: value = "task-2180692" [ 2416.685760] env[62875]: _type = "Task" [ 2416.685760] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.693159] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180692, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.195271] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180692, 'name': Rename_Task, 'duration_secs': 0.136383} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2417.195655] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2417.195787] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17478b13-9fbd-45e7-ab75-52f1740d3f71 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.202068] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2417.202068] env[62875]: value = "task-2180693" [ 2417.202068] env[62875]: _type = "Task" [ 2417.202068] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2417.209255] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180693, 'name': PowerOnVM_Task} progress is 0%. 
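Rename_Task and PowerOnVM_Task close out the spawn: the VM is renamed and then powered on, each through the same start-task-then-poll pattern. Sketch, same assumptions as above:

    def rename_and_power_on(session, vm_ref, new_name):
        # Both calls return Task morefs; wait_for_task() drives the
        # "progress is N%" polling visible in the surrounding entries.
        task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                  newName=new_name)
        session.wait_for_task(task)
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)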
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.712293] env[62875]: DEBUG oslo_vmware.api [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180693, 'name': PowerOnVM_Task, 'duration_secs': 0.416126} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2417.712578] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2417.712804] env[62875]: INFO nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Took 7.14 seconds to spawn the instance on the hypervisor. [ 2417.712960] env[62875]: DEBUG nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2417.713723] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8833642-9505-4ac5-bd4c-25800ba8e980 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.229559] env[62875]: INFO nova.compute.manager [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Took 14.17 seconds to build instance. 
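With the first build finished ("Took 14.17 seconds to build instance"), the entries that follow come from a background thread: oslo.service's periodic task runner invokes ComputeManager._sync_power_states, which lists the cluster's VMs and reconciles each instance's power state under a per-instance lock. A toy stand-in showing the general shape of such a periodic task (class name and interval are illustrative, not Nova's):

    from oslo_service import periodic_task


    class PowerStateSyncer(periodic_task.PeriodicTasks):
        # `conf` is an oslo.config ConfigOpts. The service loop calls
        # run_periodic_tasks(context), which produces the
        # "Running periodic task ..." line below.
        def __init__(self, conf):
            super().__init__(conf)

        @periodic_task.periodic_task(spacing=600)  # interval illustrative
        def _sync_power_states(self, context):
            # The real task lists VMs from the cluster, then reconciles
            # each instance under a per-instance lock (the acquire/release
            # pairs in the entries that follow).
            pass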
[ 2418.517919] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2418.731384] env[62875]: DEBUG oslo_concurrency.lockutils [None req-79924962-e5d4-438b-b517-13fa676aa443 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.677s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.022077] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Getting list of instances from cluster (obj){ [ 2419.022077] env[62875]: value = "domain-c8" [ 2419.022077] env[62875]: _type = "ClusterComputeResource" [ 2419.022077] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2419.023170] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40c7c49-57bc-499f-affa-72201de52679 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.035895] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Got total of 4 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2419.036042] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Triggering sync for uuid 3eef1182-4159-4d57-8e6d-c5a1a50315f4 {{(pid=62875) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 2419.036250] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Triggering sync for uuid 760f10ab-4617-418b-b922-4f9835eb96f4 {{(pid=62875) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 2419.036412] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Triggering sync for uuid b71c432b-80ec-4b08-a62a-b1d5ccc56f86 {{(pid=62875) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 2419.036927] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Triggering sync for uuid a5f9b278-6c02-4d5e-997a-97a8fa8944ca {{(pid=62875) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 2419.036927] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.037091] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.037358] env[62875]: DEBUG oslo_concurrency.lockutils [None
req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "760f10ab-4617-418b-b922-4f9835eb96f4" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.037549] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "760f10ab-4617-418b-b922-4f9835eb96f4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.037773] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.037952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.038194] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.038375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.039192] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225db660-2f7c-4cea-ad58-b91c33346319 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.041994] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d4c1c9-cd21-4fe2-904e-37ac15a9cb02 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.045888] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d1c5f2-4555-4492-8d1c-a05148c2ef65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.049236] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce8d656-45e7-4cef-a569-84b4e3783ff7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.287792] env[62875]: DEBUG nova.compute.manager [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca]
Received event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2419.287983] env[62875]: DEBUG nova.compute.manager [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing instance network info cache due to event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2419.288182] env[62875]: DEBUG oslo_concurrency.lockutils [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] Acquiring lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2419.288332] env[62875]: DEBUG oslo_concurrency.lockutils [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] Acquired lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2419.288494] env[62875]: DEBUG nova.network.neutron [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2419.565767] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.527s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.566186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.528s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.566506] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.529s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.569922] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "760f10ab-4617-418b-b922-4f9835eb96f4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.532s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2420.012986] env[62875]: DEBUG nova.network.neutron [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updated VIF entry in instance network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748.
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2420.013387] env[62875]: DEBUG nova.network.neutron [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [{"id": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "address": "fa:16:3e:9e:0c:34", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap473d8504-6b", "ovs_interfaceid": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2420.159759] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "a9cc9da5-b40c-492d-92a5-85e760290be9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2420.159996] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2420.516274] env[62875]: DEBUG oslo_concurrency.lockutils [req-f422da3c-49b1-4a54-9505-6e2d74e8f791 req-d104b7cd-1532-47d8-a0e8-8e1fb763781c service nova] Releasing lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2420.662312] env[62875]: DEBUG nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Starting instance...
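Throughout this sequence, cache refreshes triggered by "network-changed" events are serialized per instance by a "refresh_cache-<uuid>" lock, which is why the event handler above acquires and later releases that lock around _get_instance_nw_info. A minimal sketch of the locking idiom with oslo.concurrency; refresh_fn is a hypothetical stand-in for the Neutron round-trip that rebuilds instance_info_cache:

    from oslo_concurrency import lockutils


    def refresh_network_cache(instance_uuid, refresh_fn):
        # The lock name mirrors the log entries above; concurrent events
        # for the same instance queue up here instead of racing.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)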
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2421.184899] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2421.185186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2421.186749] env[62875]: INFO nova.compute.claims [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2422.256876] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8397ae8-66a9-4108-80ed-14db710b3daf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.264831] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc36378-7ad0-4265-bbae-6737c15eb959 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.295405] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c05f9e-5d22-4ce9-a986-6d240a681c24 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.302462] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9325734-31fa-4085-a2c9-010df89ad98e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.315134] env[62875]: DEBUG nova.compute.provider_tree [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2422.818511] env[62875]: DEBUG nova.scheduler.client.report [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2423.323859] env[62875]: DEBUG oslo_concurrency.lockutils 
[None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2423.324632] env[62875]: DEBUG nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2423.832045] env[62875]: DEBUG nova.compute.utils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2423.833236] env[62875]: DEBUG nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2423.833875] env[62875]: DEBUG nova.network.neutron [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2423.882287] env[62875]: DEBUG nova.policy [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86e44a4e203b49e09a8f9d2bb45b8079', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95d0f81815ea467cbc1c6160e27409fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2424.174548] env[62875]: DEBUG nova.network.neutron [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Successfully created port: 6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2424.337215] env[62875]: DEBUG nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Start building block device mappings for instance. 
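The resource claim above runs inside the "compute_resources" lock so that inventory reads and allocation writes for concurrent builds cannot interleave; the lock is held for 2.138s here because the claim waits on the placement inventory check. A sketch of that critical section; `tracker` is a hypothetical stand-in, and the real code wraps the method in a synchronized decorator rather than an inline lock:

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'


    def claim_for_instance(tracker, context, instance, nodename):
        # Lock name matches the acquire/release pair in the log above.
        with lockutils.lock(COMPUTE_RESOURCE_SEMAPHORE):
            return tracker.instance_claim(context, instance, nodename)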
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2425.348061] env[62875]: DEBUG nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2425.375880] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2425.376248] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2425.376480] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2425.376741] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2425.376949] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2425.377177] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2425.377475] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2425.377701] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2425.377934] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2425.378217] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2425.378494] env[62875]: DEBUG nova.virt.hardware [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2425.379744] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d811551-026f-4f20-a709-4276e73048c4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.388464] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64d346c-92f2-479a-94a4-88ab36c89272 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.055131] env[62875]: DEBUG nova.compute.manager [req-b6e6c4a2-afd4-4e1d-be3b-e5436fb8223a req-2fcbeacf-0800-4347-8074-1881bf7ed89e service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Received event network-vif-plugged-6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2433.055420] env[62875]: DEBUG oslo_concurrency.lockutils [req-b6e6c4a2-afd4-4e1d-be3b-e5436fb8223a req-2fcbeacf-0800-4347-8074-1881bf7ed89e service nova] Acquiring lock "a9cc9da5-b40c-492d-92a5-85e760290be9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2433.055523] env[62875]: DEBUG oslo_concurrency.lockutils [req-b6e6c4a2-afd4-4e1d-be3b-e5436fb8223a req-2fcbeacf-0800-4347-8074-1881bf7ed89e service nova] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2433.055744] env[62875]: DEBUG oslo_concurrency.lockutils [req-b6e6c4a2-afd4-4e1d-be3b-e5436fb8223a req-2fcbeacf-0800-4347-8074-1881bf7ed89e service nova] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2433.055858] env[62875]:
DEBUG nova.compute.manager [req-b6e6c4a2-afd4-4e1d-be3b-e5436fb8223a req-2fcbeacf-0800-4347-8074-1881bf7ed89e service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] No waiting events found dispatching network-vif-plugged-6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2433.056178] env[62875]: WARNING nova.compute.manager [req-b6e6c4a2-afd4-4e1d-be3b-e5436fb8223a req-2fcbeacf-0800-4347-8074-1881bf7ed89e service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Received unexpected event network-vif-plugged-6ca62ab4-73e8-4b45-b05b-6e807a8a2515 for instance with vm_state building and task_state spawning. [ 2433.138787] env[62875]: DEBUG nova.network.neutron [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Successfully updated port: 6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2433.670298] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2433.670462] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2433.670620] env[62875]: DEBUG nova.network.neutron [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2434.221518] env[62875]: DEBUG nova.network.neutron [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Instance cache missing network info. 
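The "No waiting events found" / "Received unexpected event" pair above is the external-event dispatch path: under the "<uuid>-events" lock, the handler looks for a registered waiter for network-vif-plugged and, finding none (nothing blocks on this port during spawn), logs the warning and moves on. A toy version of that dispatch using a plain dict and a single threading lock rather than Nova's per-instance event objects; the registry name is hypothetical:

    import threading

    _events_lock = threading.Lock()
    # Hypothetical registry: {(instance_uuid, event_key): threading.Event}
    _waiting_events = {}


    def pop_instance_event(instance_uuid, event_key):
        # Hand the event to a waiter if one is registered; returning None
        # corresponds to the "No waiting events found dispatching ..."
        # line, after which the caller logs the unexpected-event warning.
        with _events_lock:
            return _waiting_events.pop((instance_uuid, event_key), None)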
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2434.339575] env[62875]: DEBUG nova.network.neutron [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updating instance_info_cache with network_info: [{"id": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "address": "fa:16:3e:85:f7:a8", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ca62ab4-73", "ovs_interfaceid": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2434.842732] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2434.843076] env[62875]: DEBUG nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Instance network_info: |[{"id": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "address": "fa:16:3e:85:f7:a8", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ca62ab4-73", "ovs_interfaceid": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2434.843512] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:f7:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ca62ab4-73e8-4b45-b05b-6e807a8a2515', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2434.850974] env[62875]: DEBUG oslo.service.loopingcall [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2434.851199] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2434.851431] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b331a7b-91e1-4702-bfb9-8fc9cc966c9c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2434.870404] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2434.870404] env[62875]: value = "task-2180694" [ 2434.870404] env[62875]: _type = "Task" [ 2434.870404] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2434.877566] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180694, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2435.108818] env[62875]: DEBUG nova.compute.manager [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Received event network-changed-6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2435.109047] env[62875]: DEBUG nova.compute.manager [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Refreshing instance network info cache due to event network-changed-6ca62ab4-73e8-4b45-b05b-6e807a8a2515. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2435.109278] env[62875]: DEBUG oslo_concurrency.lockutils [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] Acquiring lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2435.109427] env[62875]: DEBUG oslo_concurrency.lockutils [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] Acquired lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2435.109592] env[62875]: DEBUG nova.network.neutron [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Refreshing network info cache for port 6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2435.380919] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180694, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2435.798045] env[62875]: DEBUG nova.network.neutron [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updated VIF entry in instance network info cache for port 6ca62ab4-73e8-4b45-b05b-6e807a8a2515. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2435.798423] env[62875]: DEBUG nova.network.neutron [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updating instance_info_cache with network_info: [{"id": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "address": "fa:16:3e:85:f7:a8", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ca62ab4-73", "ovs_interfaceid": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2435.881487] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180694, 'name': CreateVM_Task, 'duration_secs': 0.60801} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2435.881672] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2435.882364] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2435.882532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2435.882852] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2435.883116] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-830b9901-08ca-45c3-a62e-4a51e8c09333 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2435.887432] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2435.887432] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528b7bc8-ec2d-9994-14c7-0c35738f7bcd" [ 2435.887432] env[62875]: _type = "Task" [ 2435.887432] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2435.896081] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528b7bc8-ec2d-9994-14c7-0c35738f7bcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2436.300717] env[62875]: DEBUG oslo_concurrency.lockutils [req-a7cff152-bb02-435d-9a21-18c8856e2685 req-0b1b5577-ad52-4f70-91d8-eaef7a377b5b service nova] Releasing lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2436.398291] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528b7bc8-ec2d-9994-14c7-0c35738f7bcd, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. 
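Here the second build hits the image cache on datastore1: the code takes locks named after the cached image path, then probes with SearchDatastore_Task and only downloads and copies the image when the probe misses. Re-checking under the lock is what keeps concurrent builds from fetching the same image twice. A sketch of the idiom; exists_fn and fetch_fn are hypothetical stand-ins for the datastore probe and the Glance fetch:

    from oslo_concurrency import lockutils


    def fetch_image_if_missing(cache_lock_name, exists_fn, fetch_fn):
        # cache_lock_name mirrors the datastore path in the log, e.g.
        # "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
        with lockutils.lock(cache_lock_name):
            if not exists_fn():   # the SearchDatastore_Task probe
                fetch_fn()        # download and copy into the cache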
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2436.398593] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2436.398807] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2436.399054] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2436.399210] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2436.399392] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2436.399641] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f1fb8e3-be18-4abd-b99a-0a396f5da590 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2436.407665] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2436.407836] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Folder [datastore1] devstack-image-cache_base created. 
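The MakeDirectory call above is a plain (non-task) FileManager method; with createParentDirectories=True it behaves as an idempotent "create folder if missing" helper, and a FileAlreadyExists fault from a racing caller can simply be tolerated. A sketch with the usual session assumption:

    def mkdir_if_missing(session, dc_ref, ds_path):
        # MakeDirectory returns nothing to poll, unlike the *_Task calls;
        # createParentDirectories makes nested cache paths safe to create.
        file_mgr = session.vim.service_content.fileManager
        session.invoke_api(session.vim, 'MakeDirectory', file_mgr,
                           name=ds_path, datacenter=dc_ref,
                           createParentDirectories=True)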
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2436.408615] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee119c77-5be8-4812-adb5-17453e29c5ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2436.413211] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2436.413211] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52192170-528b-a618-410b-43d02d1b5a51" [ 2436.413211] env[62875]: _type = "Task" [ 2436.413211] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2436.420261] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52192170-528b-a618-410b-43d02d1b5a51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2436.923305] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52192170-528b-a618-410b-43d02d1b5a51, 'name': SearchDatastore_Task, 'duration_secs': 0.008046} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2436.924112] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c86d0ba-40db-46eb-b12c-66807e4544da {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2436.928943] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2436.928943] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5284734c-e3aa-1fab-0d34-394724e8ce55" [ 2436.928943] env[62875]: _type = "Task" [ 2436.928943] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2436.936163] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5284734c-e3aa-1fab-0d34-394724e8ce55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2437.439335] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5284734c-e3aa-1fab-0d34-394724e8ce55, 'name': SearchDatastore_Task, 'duration_secs': 0.008485} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2437.439687] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2437.439914] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] a9cc9da5-b40c-492d-92a5-85e760290be9/a9cc9da5-b40c-492d-92a5-85e760290be9.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2437.440228] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9b136df-0c9f-4167-8121-8c96119d7f45 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.446277] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2437.446277] env[62875]: value = "task-2180695" [ 2437.446277] env[62875]: _type = "Task" [ 2437.446277] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2437.453631] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2437.955917] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428001} completed successfully. 
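Each "Waiting for the task: (returnval){ ... _type = "Task" }" block followed by "progress is N%" and "completed successfully" entries is oslo.vmware's wait_for_task/_poll_task loop (api.py:397-444 above): invoke the vCenter method, then poll the returned task object until it reaches a terminal state. The roughly 0.5 s spacing between consecutive poll entries matches the library's default task poll interval. A minimal re-implementation for orientation only; get_task_info is a stand-in for the PropertyCollector round-trip, not a real oslo.vmware name:

    # Illustrative poll loop; the real one is
    # oslo_vmware.api.VMwareAPISession.wait_for_task / _poll_task.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        while True:
            info = get_task_info()              # one RetrievePropertiesEx call
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error'))
            # queued/running: report progress and go around again
            print('progress is %s%%' % (info.get('progress') or 0))
            time.sleep(poll_interval)

The same loop explains why even sub-second operations still log a "progress is 0%" entry: the first poll usually lands before the task finishes, and completion is only observed on the next tick.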
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2437.956195] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] a9cc9da5-b40c-492d-92a5-85e760290be9/a9cc9da5-b40c-492d-92a5-85e760290be9.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2437.956435] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2437.956692] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e957e139-2ce9-4360-976e-e9983bed762a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.962991] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2437.962991] env[62875]: value = "task-2180696" [ 2437.962991] env[62875]: _type = "Task" [ 2437.962991] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2437.971111] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180696, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2438.473024] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061209} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2438.473342] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2438.474055] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb5b311-6ef8-4ef8-a4be-ff78723e0fe5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.494954] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] a9cc9da5-b40c-492d-92a5-85e760290be9/a9cc9da5-b40c-492d-92a5-85e760290be9.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2438.495193] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3223a4cc-d0c4-42e0-8c89-0972b06adc9f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.514190] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2438.514190] env[62875]: value = "task-2180697" [ 2438.514190] env[62875]: _type = "Task" [ 2438.514190] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2438.521454] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180697, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2439.024198] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180697, 'name': ReconfigVM_Task, 'duration_secs': 0.244018} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2439.024523] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Reconfigured VM instance instance-00000064 to attach disk [datastore1] a9cc9da5-b40c-492d-92a5-85e760290be9/a9cc9da5-b40c-492d-92a5-85e760290be9.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2439.025153] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a711e62-6998-44e8-aa5c-95366b021823 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.031666] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2439.031666] env[62875]: value = "task-2180698" [ 2439.031666] env[62875]: _type = "Task" [ 2439.031666] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2439.040071] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180698, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2439.541599] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180698, 'name': Rename_Task, 'duration_secs': 0.137271} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2439.541958] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2439.542139] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a34a57d1-0f61-4639-80cf-e66ca417629a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.547898] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2439.547898] env[62875]: value = "task-2180699" [ 2439.547898] env[62875]: _type = "Task" [ 2439.547898] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2439.554979] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180699, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2440.057757] env[62875]: DEBUG oslo_vmware.api [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180699, 'name': PowerOnVM_Task, 'duration_secs': 0.435878} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2440.058014] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2440.058227] env[62875]: INFO nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Took 14.71 seconds to spawn the instance on the hypervisor. [ 2440.058412] env[62875]: DEBUG nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2440.059181] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b96b807-0fd5-420d-aa19-6fe8db71e2fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.577962] env[62875]: INFO nova.compute.manager [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Took 19.41 seconds to build instance. [ 2441.080124] env[62875]: DEBUG oslo_concurrency.lockutils [None req-040a3f85-549f-4692-ad0e-78ffc568d697 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.920s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2441.168785] env[62875]: DEBUG nova.compute.manager [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Received event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2441.169135] env[62875]: DEBUG nova.compute.manager [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing instance network info cache due to event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748. 
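Taken together, the tasks above are the complete vmwareapi spawn path for instance a9cc9da5: copy the cached VMDK out of devstack-image-cache_base (CopyVirtualDisk_Task), extend the root disk to the flavor size (ExtendVirtualDisk_Task), attach it via ReconfigVM_Task, Rename_Task, then PowerOnVM_Task. The "Extending root virtual disk to 1048576" figure is the flavor's 1 GiB root disk expressed in KB (the same instance later carries a DISK_GB: 1 allocation in placement), and the two INFO summaries separate hypervisor time (14.71 s to spawn) from end-to-end build time (19.41 s, which also covers scheduling, port binding and image handling; the build lock was held 20.920 s in all). The unit conversion, spelled out:

    # The flavor's 1 GiB root disk, in KB, as passed to ExtendVirtualDisk_Task
    # (newCapacityKb); root_gb=1 is inferred from the DISK_GB: 1 placement
    # allocation for this instance further down, not stated in the log itself.
    root_gb = 1
    requested_size_kb = root_gb * 1024 * 1024
    print(requested_size_kb)  # 1048576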
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2441.169397] env[62875]: DEBUG oslo_concurrency.lockutils [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] Acquiring lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2441.169554] env[62875]: DEBUG oslo_concurrency.lockutils [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] Acquired lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2441.169717] env[62875]: DEBUG nova.network.neutron [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2441.868026] env[62875]: DEBUG nova.network.neutron [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updated VIF entry in instance network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2441.868413] env[62875]: DEBUG nova.network.neutron [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [{"id": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "address": "fa:16:3e:9e:0c:34", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap473d8504-6b", "ovs_interfaceid": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2442.204836] env[62875]: DEBUG nova.compute.manager [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Received event network-changed-6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2442.205092] env[62875]: DEBUG nova.compute.manager [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] [instance: 
a9cc9da5-b40c-492d-92a5-85e760290be9] Refreshing instance network info cache due to event network-changed-6ca62ab4-73e8-4b45-b05b-6e807a8a2515. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2442.205297] env[62875]: DEBUG oslo_concurrency.lockutils [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] Acquiring lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2442.205401] env[62875]: DEBUG oslo_concurrency.lockutils [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] Acquired lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2442.205560] env[62875]: DEBUG nova.network.neutron [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Refreshing network info cache for port 6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2442.371336] env[62875]: DEBUG oslo_concurrency.lockutils [req-f4edb4b4-53b9-4aa9-a5e9-f0d75aa0a86e req-f6ed7b17-3c28-4e74-a307-ea98fb6c25c9 service nova] Releasing lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2442.897285] env[62875]: DEBUG nova.network.neutron [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updated VIF entry in instance network info cache for port 6ca62ab4-73e8-4b45-b05b-6e807a8a2515. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2442.897689] env[62875]: DEBUG nova.network.neutron [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updating instance_info_cache with network_info: [{"id": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "address": "fa:16:3e:85:f7:a8", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ca62ab4-73", "ovs_interfaceid": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2443.194192] env[62875]: DEBUG nova.compute.manager [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Received event network-changed-6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2443.194419] env[62875]: DEBUG nova.compute.manager [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Refreshing instance network info cache due to event network-changed-6ca62ab4-73e8-4b45-b05b-6e807a8a2515. 
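The instance_info_cache payloads logged above are plain JSON: a list with one entry per VIF, each carrying the Neutron port UUID ("id"), the MAC ("address"), the network with its subnets, fixed and floating IPs, and the OVS binding details ("devname", "ovs_interfaceid", segmentation_id). A throwaway helper like the hypothetical one below pulls the addresses out of one such entry; the file name is an assumption:

    # Hypothetical reader for one VIF entry of a network_info list, e.g. the
    # 6ca62ab4... entry above saved to network_info.json.
    import json

    def vif_addresses(vif):
        fixed, floating = [], []
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return vif['address'], fixed, floating

    with open('network_info.json') as fh:
        vif = json.load(fh)[0]
    print(vif_addresses(vif))   # ('fa:16:3e:85:f7:a8', ['192.168.128.14'], [])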
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2443.194618] env[62875]: DEBUG oslo_concurrency.lockutils [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] Acquiring lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2443.399851] env[62875]: DEBUG oslo_concurrency.lockutils [req-72820ed9-431a-4c37-9e14-dada98526a01 req-b7f62e79-357d-4350-82d3-a6a7d4906e69 service nova] Releasing lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2443.400295] env[62875]: DEBUG oslo_concurrency.lockutils [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] Acquired lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2443.400490] env[62875]: DEBUG nova.network.neutron [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Refreshing network info cache for port 6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2444.087728] env[62875]: DEBUG nova.network.neutron [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updated VIF entry in instance network info cache for port 6ca62ab4-73e8-4b45-b05b-6e807a8a2515. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2444.088096] env[62875]: DEBUG nova.network.neutron [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updating instance_info_cache with network_info: [{"id": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "address": "fa:16:3e:85:f7:a8", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ca62ab4-73", "ovs_interfaceid": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2444.229200] env[62875]: DEBUG nova.compute.manager [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Received 
event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2444.229200] env[62875]: DEBUG nova.compute.manager [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing instance network info cache due to event network-changed-473d8504-6b5c-40ab-becc-e8cc4c9ea748. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2444.229374] env[62875]: DEBUG oslo_concurrency.lockutils [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] Acquiring lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2444.229522] env[62875]: DEBUG oslo_concurrency.lockutils [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] Acquired lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2444.229683] env[62875]: DEBUG nova.network.neutron [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Refreshing network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2444.590675] env[62875]: DEBUG oslo_concurrency.lockutils [req-476cc848-1fd4-44ef-8a3a-8e37af8ba1d7 req-0883c45a-ebf3-4080-a7cd-10e25772aa88 service nova] Releasing lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2444.927675] env[62875]: DEBUG nova.network.neutron [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updated VIF entry in instance network info cache for port 473d8504-6b5c-40ab-becc-e8cc4c9ea748. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2444.928053] env[62875]: DEBUG nova.network.neutron [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [{"id": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "address": "fa:16:3e:9e:0c:34", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap473d8504-6b", "ovs_interfaceid": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2445.431393] env[62875]: DEBUG oslo_concurrency.lockutils [req-2aa5a99e-2fcf-405a-9776-3b4c6917369c req-91ea473e-b154-4ea2-a44a-4e11cb9351f4 service nova] Releasing lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2449.706650] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2449.707064] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... 
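From this point the worker is idle and the log is dominated by ComputeManager periodic tasks (_reclaim_queued_deletes above, then _instance_usage_audit, _heal_instance_info_cache and the rest). The "Running periodic task ..." lines are emitted by oslo.service's periodic_task machinery each time a task's interval elapses; tasks disabled by configuration, like _reclaim_queued_deletes with reclaim_instance_interval <= 0, still run and simply return early. A minimal sketch using the library's real decorator; the spacing value here is made up (Nova's intervals come from nova.conf):

    # Minimal oslo.service periodic-task sketch; run_periodic_tasks() is what
    # the service loop invokes and what logs "Running periodic task ...".
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _poll_rescued_instances(self, context):
            pass  # config checks and early returns go here, as in the log

    Manager().run_periodic_tasks(context=None)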
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2450.707601] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2451.702070] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2451.705718] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.706964] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2456.706199] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2459.706399] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2462.707252] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2462.707628] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2462.707628] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 2463.239124] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2463.239278] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2463.239426] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2463.239575] env[62875]: 
DEBUG nova.objects.instance [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lazy-loading 'info_cache' on Instance uuid 3eef1182-4159-4d57-8e6d-c5a1a50315f4 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2464.955274] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updating instance_info_cache with network_info: [{"id": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "address": "fa:16:3e:d7:6b:a0", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab61f4d3-e2", "ovs_interfaceid": "ab61f4d3-e2ed-435c-aee6-f21043aa9660", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2465.458360] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-3eef1182-4159-4d57-8e6d-c5a1a50315f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2465.458598] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2465.458805] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2465.962496] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2465.962496] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2465.962723] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2465.962723] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2465.963653] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cbd668-21bd-469d-a37a-0e7a5e476e52 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.972109] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c65e9a-cbbf-4978-b952-e76d7e3caf9d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.987051] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f49a8d-f7e1-4b9f-9d41-b84f3f18085f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.993210] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7c16bc-14f0-4c0f-b337-27d81f83a451 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2466.021651] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180291MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2466.021810] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2466.022017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2467.048111] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 3eef1182-4159-4d57-8e6d-c5a1a50315f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2467.048389] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 760f10ab-4617-418b-b922-4f9835eb96f4 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2467.048389] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance b71c432b-80ec-4b08-a62a-b1d5ccc56f86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2467.048517] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2467.048655] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a9cc9da5-b40c-492d-92a5-85e760290be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2467.048841] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2467.048982] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2467.110308] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46781db2-01d1-4a35-a4aa-5902c62997b1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.117692] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2bd235-b259-4639-bc95-5473a7ee8f33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.148228] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7851ffcb-e31d-4e59-9c69-4c4ecb29ba6d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.155190] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6096a7-6824-41dc-85ab-877480bcef24 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.168086] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2467.689302] env[62875]: ERROR nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [req-0c19ebc1-1dce-4cb4-be40-8131d33c676e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0c19ebc1-1dce-4cb4-be40-8131d33c676e"}]} [ 2467.706077] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2467.718153] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2467.718336] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2467.728604] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2467.747008] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2467.813026] env[62875]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0f2ab5-e9a4-4e18-8ca1-637df8316b5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.820170] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6baf91d-ba03-43d0-8db3-dc8c66263cad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.850013] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946ec435-b7c5-405f-8b8a-14effa680887 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.856816] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f74c059-f23e-4e3d-bdaa-65320c481833 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.870200] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2468.402078] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2468.402411] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 123 to 124 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2468.402553] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2468.907501] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2468.907748] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.886s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2473.903071] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.707697] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.708137] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.708137] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2513.702319] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2513.705944] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2513.706150] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2518.706291] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2520.708540] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2523.707788] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2523.707788] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] 
Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2524.239447] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2524.239626] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2524.239729] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2525.459935] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updating instance_info_cache with network_info: [{"id": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "address": "fa:16:3e:a9:13:20", "network": {"id": "a3bcd1a8-4b9a-4acb-a4bc-4b807b4eda0b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1163142763-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4d3024874d9450e82c83dc6ccc591e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad23a606-6c", "ovs_interfaceid": "ad23a606-6c6c-449c-8874-c8f7ed1cb657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2525.963162] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-760f10ab-4617-418b-b922-4f9835eb96f4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2525.963391] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2525.963638] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2526.467061] env[62875]: DEBUG oslo_concurrency.lockutils [None 
req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2526.467344] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2526.467511] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2526.467694] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2526.468633] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45564cbf-2428-48c2-a727-d9c54bc37515 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2526.476785] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f664dc-0bce-4dce-a7f2-e999c81552ef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2526.491317] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffdd67a-2e91-4232-8a7f-fd232f9e626d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2526.497296] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caddc272-ac4d-46b3-a535-8befa78a9ec6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2526.525469] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180091MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2526.525618] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2526.525808] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2527.548774] env[62875]: DEBUG nova.compute.resource_tracker [None 
req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 3eef1182-4159-4d57-8e6d-c5a1a50315f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2527.549045] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 760f10ab-4617-418b-b922-4f9835eb96f4 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2527.549096] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance b71c432b-80ec-4b08-a62a-b1d5ccc56f86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2527.549186] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2527.549305] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a9cc9da5-b40c-492d-92a5-85e760290be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2527.549484] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2527.549620] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2527.607283] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c965b196-7d8d-48bf-bee8-b48ae850dddd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.614617] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92006734-1d74-4c83-91ca-aacbe436bc57 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.645587] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ad2f3d-292a-409f-a8c6-d97bf0a7c64a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.652221] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d53c70-22eb-43ea-a6a7-c21825130c49 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.664697] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2528.168200] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2528.169473] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2528.169664] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.644s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2572.037938] env[62875]: INFO nova.compute.manager [None 
req-a2a33257-6d3f-40b2-8f4d-8b250891da08 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Get console output [ 2572.038284] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-a2a33257-6d3f-40b2-8f4d-8b250891da08 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] The console log is missing. Check your VSPC configuration [ 2572.912936] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2572.913150] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2572.913304] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2573.610144] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2573.610506] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2573.610653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2573.610837] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2573.611022] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4-events" "released" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2573.613610] env[62875]: INFO nova.compute.manager [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Terminating instance [ 2573.707161] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2573.748801] env[62875]: INFO nova.compute.manager [None req-f46eca96-0141-4691-9489-c1a7a9dd450c tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Get console output [ 2573.749039] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-f46eca96-0141-4691-9489-c1a7a9dd450c tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] The console log is missing. Check your VSPC configuration [ 2574.117200] env[62875]: DEBUG nova.compute.manager [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2574.117451] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2574.118386] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb76c83-938d-46b9-93fc-9cd63b454dbc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.126446] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2574.126660] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a486398e-1424-4e76-ba59-5405766d8ce5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.133241] env[62875]: DEBUG oslo_vmware.api [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2574.133241] env[62875]: value = "task-2180700" [ 2574.133241] env[62875]: _type = "Task" [ 2574.133241] env[62875]: } to complete.
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2574.141261] env[62875]: DEBUG oslo_vmware.api [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180700, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2574.643200] env[62875]: DEBUG oslo_vmware.api [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180700, 'name': PowerOffVM_Task, 'duration_secs': 0.185046} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2574.643573] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2574.643698] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2574.643985] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac8b62a0-cbb5-4e0b-8aaa-2dbe4cebbc4f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.656678] env[62875]: INFO nova.compute.manager [None req-979626e4-3b58-44dc-a419-24220192678c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Get console output [ 2574.656843] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-979626e4-3b58-44dc-a419-24220192678c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] The console log is missing. 
Check your VSPC configuration [ 2574.701403] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2574.705959] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2574.736684] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2574.736903] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2574.737097] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleting the datastore file [datastore2] 3eef1182-4159-4d57-8e6d-c5a1a50315f4 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2574.737344] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a565102-d39c-4487-900e-d933a9039182 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.744171] env[62875]: DEBUG oslo_vmware.api [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2574.744171] env[62875]: value = "task-2180702" [ 2574.744171] env[62875]: _type = "Task" [ 2574.744171] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2574.751735] env[62875]: DEBUG oslo_vmware.api [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180702, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2574.817845] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Acquiring lock "760f10ab-4617-418b-b922-4f9835eb96f4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2574.818099] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "760f10ab-4617-418b-b922-4f9835eb96f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2574.818334] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Acquiring lock "760f10ab-4617-418b-b922-4f9835eb96f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2574.818522] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "760f10ab-4617-418b-b922-4f9835eb96f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2574.818689] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "760f10ab-4617-418b-b922-4f9835eb96f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2574.820763] env[62875]: INFO nova.compute.manager [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Terminating instance [ 2575.254704] env[62875]: DEBUG oslo_vmware.api [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126821} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2575.255453] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2575.255453] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2575.255453] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2575.255620] env[62875]: INFO nova.compute.manager [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2575.255731] env[62875]: DEBUG oslo.service.loopingcall [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2575.255933] env[62875]: DEBUG nova.compute.manager [-] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2575.256045] env[62875]: DEBUG nova.network.neutron [-] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2575.324129] env[62875]: DEBUG nova.compute.manager [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Start destroying the instance on the hypervisor.
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2575.324417] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2575.324712] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bc2f11e-b28c-441c-bb04-59a9a3580ddd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.332450] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2575.332450] env[62875]: value = "task-2180703" [ 2575.332450] env[62875]: _type = "Task" [ 2575.332450] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2575.340725] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2575.721156] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2575.721156] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2575.724371] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2575.724371] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2575.724371] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038
tempest-AttachVolumeTestJSON-271302038-project-member] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2575.724563] env[62875]: INFO nova.compute.manager [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Terminating instance [ 2575.843207] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180703, 'name': PowerOffVM_Task, 'duration_secs': 0.191887} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2575.843932] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2575.844149] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Volume detach. Driver type: vmdk {{(pid=62875) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2575.844343] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445006', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'name': 'volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '760f10ab-4617-418b-b922-4f9835eb96f4', 'attached_at': '', 'detached_at': '', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'serial': '80b0118f-e6e2-416e-bd9d-2cf922e866bc'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2575.845119] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ddba5e-e337-4c31-a7b0-7ecf4c6369dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.864730] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afc762c-f8db-4143-ae52-eaeb8bb3eaf6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.868381] env[62875]: DEBUG nova.compute.manager [req-adba9f36-1ae3-4788-8320-ce8a60a400bf req-6635ffbf-10d8-4ef0-9b3f-d0a6455e0641 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Received event network-vif-deleted-ab61f4d3-e2ed-435c-aee6-f21043aa9660 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2575.868572] env[62875]:
INFO nova.compute.manager [req-adba9f36-1ae3-4788-8320-ce8a60a400bf req-6635ffbf-10d8-4ef0-9b3f-d0a6455e0641 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Neutron deleted interface ab61f4d3-e2ed-435c-aee6-f21043aa9660; detaching it from the instance and deleting it from the info cache [ 2575.868742] env[62875]: DEBUG nova.network.neutron [req-adba9f36-1ae3-4788-8320-ce8a60a400bf req-6635ffbf-10d8-4ef0-9b3f-d0a6455e0641 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2575.875035] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce8c3ae-d421-420e-bae8-8ab541fbc645 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.893586] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f439c3c-8ace-4f8e-ba24-2f0e5c9b4342 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.908722] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] The volume has not been displaced from its original location: [datastore2] volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc/volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc.vmdk. No consolidation needed. {{(pid=62875) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2575.913820] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2575.914474] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ef4b822-5eef-4304-8dc0-5d5bb6acae40 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.932690] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2575.932690] env[62875]: value = "task-2180704" [ 2575.932690] env[62875]: _type = "Task" [ 2575.932690] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2575.940499] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180704, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2576.228812] env[62875]: DEBUG nova.compute.manager [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2576.229058] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2576.230355] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90d9bf3-b4de-4bf2-aed7-78910ad8c7ae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.237678] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2576.237901] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb5b76d2-e57d-4e1f-9d03-aa397926eb8f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.243269] env[62875]: DEBUG oslo_vmware.api [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2576.243269] env[62875]: value = "task-2180705" [ 2576.243269] env[62875]: _type = "Task" [ 2576.243269] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2576.250371] env[62875]: DEBUG oslo_vmware.api [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180705, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2576.338930] env[62875]: DEBUG nova.network.neutron [-] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2576.371311] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c05011c-d0b5-4fbb-8b63-511738a85185 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.383424] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5dae427-f99e-4f87-980f-9a77ae257fe7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.409964] env[62875]: DEBUG nova.compute.manager [req-adba9f36-1ae3-4788-8320-ce8a60a400bf req-6635ffbf-10d8-4ef0-9b3f-d0a6455e0641 service nova] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Detach interface failed, port_id=ab61f4d3-e2ed-435c-aee6-f21043aa9660, reason: Instance 3eef1182-4159-4d57-8e6d-c5a1a50315f4 could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2576.442429] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180704, 'name': ReconfigVM_Task, 'duration_secs': 0.184229} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2576.442704] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2576.447578] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b766ac3-66d5-4879-95d2-e851b1260c6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.463824] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2576.463824] env[62875]: value = "task-2180706" [ 2576.463824] env[62875]: _type = "Task" [ 2576.463824] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2576.472803] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180706, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2576.753188] env[62875]: DEBUG oslo_vmware.api [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180705, 'name': PowerOffVM_Task, 'duration_secs': 0.195741} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2576.753490] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2576.753627] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2576.753870] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7398132-d9c3-4d42-b4af-01c905350171 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.842158] env[62875]: INFO nova.compute.manager [-] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Took 1.59 seconds to deallocate network for instance. 
[ 2576.916016] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2576.916272] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2576.916625] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Deleting the datastore file [datastore2] b71c432b-80ec-4b08-a62a-b1d5ccc56f86 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2576.916716] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a807879-4eed-40f6-8b06-1174933254c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.922899] env[62875]: DEBUG oslo_vmware.api [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2576.922899] env[62875]: value = "task-2180708" [ 2576.922899] env[62875]: _type = "Task" [ 2576.922899] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2576.930227] env[62875]: DEBUG oslo_vmware.api [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180708, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2576.973077] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180706, 'name': ReconfigVM_Task, 'duration_secs': 0.142833} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2576.973374] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445006', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'name': 'volume-80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '760f10ab-4617-418b-b922-4f9835eb96f4', 'attached_at': '', 'detached_at': '', 'volume_id': '80b0118f-e6e2-416e-bd9d-2cf922e866bc', 'serial': '80b0118f-e6e2-416e-bd9d-2cf922e866bc'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2576.973641] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2576.974392] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d940bc5-63b4-4829-ac85-9e05da31bb67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2576.980551] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2576.980810] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc77e64a-67a2-4d75-b5b0-28d70246ab70 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.124922] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2577.125123] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2577.125312] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Deleting the datastore file [datastore2] 760f10ab-4617-418b-b922-4f9835eb96f4 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2577.125583] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac789457-f992-48a3-888c-40bed69f6c9c {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.132372] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for the task: (returnval){ [ 2577.132372] env[62875]: value = "task-2180710" [ 2577.132372] env[62875]: _type = "Task" [ 2577.132372] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2577.139722] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180710, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2577.348796] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2577.349076] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2577.349308] env[62875]: DEBUG nova.objects.instance [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lazy-loading 'resources' on Instance uuid 3eef1182-4159-4d57-8e6d-c5a1a50315f4 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2577.432587] env[62875]: DEBUG oslo_vmware.api [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128482} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2577.432791] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2577.432977] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2577.433179] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2577.433352] env[62875]: INFO nova.compute.manager [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Took 1.20 seconds to destroy the instance on the hypervisor. [ 2577.433584] env[62875]: DEBUG oslo.service.loopingcall [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2577.433764] env[62875]: DEBUG nova.compute.manager [-] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2577.433858] env[62875]: DEBUG nova.network.neutron [-] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2577.643031] env[62875]: DEBUG oslo_vmware.api [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Task: {'id': task-2180710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076546} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2577.643310] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2577.643499] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2577.643676] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2577.643851] env[62875]: INFO nova.compute.manager [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Took 2.32 seconds to destroy the instance on the hypervisor. [ 2577.644130] env[62875]: DEBUG oslo.service.loopingcall [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2577.644331] env[62875]: DEBUG nova.compute.manager [-] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2577.644427] env[62875]: DEBUG nova.network.neutron [-] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2577.948923] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f638069-d09a-43f8-85be-b4225b0473fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.956560] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ea9c1f-f05c-4e15-a401-3d84fd362729 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.989098] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fbf004-13b7-498c-83d7-9086214b84a6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.996676] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739a641b-64f9-4a36-887e-73ddded2cfa4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2578.010182] env[62875]: DEBUG nova.compute.provider_tree [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2578.123915] env[62875]: DEBUG nova.compute.manager [req-c67c1c99-86f1-495c-aadb-f44e7cd85b1d req-b2d99ad0-689f-4f02-92ed-cab5551b7e65 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Received event network-vif-deleted-98207918-3b84-489b-95cc-a52189a1e220 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2578.124160] env[62875]: INFO nova.compute.manager [req-c67c1c99-86f1-495c-aadb-f44e7cd85b1d req-b2d99ad0-689f-4f02-92ed-cab5551b7e65 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Neutron deleted interface 98207918-3b84-489b-95cc-a52189a1e220; detaching it from the instance and deleting it from the info cache [ 2578.124345] env[62875]: DEBUG nova.network.neutron [req-c67c1c99-86f1-495c-aadb-f44e7cd85b1d req-b2d99ad0-689f-4f02-92ed-cab5551b7e65 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2578.542267] env[62875]: DEBUG nova.scheduler.client.report [None req-ee61e348-e250-4248-9718-a597222b207c 
tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2578.542550] env[62875]: DEBUG nova.compute.provider_tree [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 124 to 125 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2578.542734] env[62875]: DEBUG nova.compute.provider_tree [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2578.603064] env[62875]: DEBUG nova.network.neutron [-] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2578.626702] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fc37986-66a8-4a04-b2af-fa5636356959 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2578.636696] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff300f1-74ba-41e4-8ce4-5a7561583666 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2578.662818] env[62875]: DEBUG nova.compute.manager [req-c67c1c99-86f1-495c-aadb-f44e7cd85b1d req-b2d99ad0-689f-4f02-92ed-cab5551b7e65 service nova] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Detach interface failed, port_id=98207918-3b84-489b-95cc-a52189a1e220, reason: Instance b71c432b-80ec-4b08-a62a-b1d5ccc56f86 could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2578.757590] env[62875]: DEBUG nova.network.neutron [-] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2579.047954] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2579.071209] env[62875]: INFO nova.scheduler.client.report [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleted allocations for instance 3eef1182-4159-4d57-8e6d-c5a1a50315f4 [ 2579.105346] env[62875]: INFO nova.compute.manager [-] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Took 1.67 seconds to deallocate network for instance. [ 2579.260837] env[62875]: INFO nova.compute.manager [-] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Took 1.62 seconds to deallocate network for instance. [ 2579.578848] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ee61e348-e250-4248-9718-a597222b207c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "3eef1182-4159-4d57-8e6d-c5a1a50315f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.968s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2579.612565] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2579.612922] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2579.613314] env[62875]: DEBUG nova.objects.instance [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lazy-loading 'resources' on Instance uuid b71c432b-80ec-4b08-a62a-b1d5ccc56f86 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2579.827184] env[62875]: INFO nova.compute.manager [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Took 0.57 seconds to detach 1 volumes for instance. 
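The lockutils trio that brackets each resource-tracker update above -- 'Acquiring lock "compute_resources" by ...', 'acquired ... :: waited N s', '"released" ... :: held N s' -- is the standard DEBUG trace oslo.concurrency emits around a callable guarded by a named in-process lock (the "inner" wrapper at lockutils.py:402/407/421 in these entries). A minimal sketch of how such a trace is produced, assuming only stock oslo.concurrency; update_usage here is a stand-in for Nova's ResourceTracker method, not its actual code:

import logging

from oslo_concurrency import lockutils

# lockutils logs its acquire/release trace at DEBUG level.
logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized('compute_resources')
def update_usage():
    # The body runs with the "compute_resources" semaphore held; concurrent
    # callers block in the wrapper, which surfaces in the log as non-zero
    # "waited" times.
    pass

update_usage()

The hold times reported above ("held 1.699s" and similar) cover everything done under the lock, including the Placement inventory round-trips, which is why the instance claims later in this capture report non-trivial waits.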
[ 2579.831878] env[62875]: DEBUG nova.compute.manager [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Deleting volume: 80b0118f-e6e2-416e-bd9d-2cf922e866bc {{(pid=62875) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2580.152008] env[62875]: DEBUG nova.compute.manager [req-88f54bac-4960-4a3c-8f39-583cdea14390 req-4bc5f4e9-3ded-4272-bfcf-cf4b7ed4ef82 service nova] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Received event network-vif-deleted-ad23a606-6c6c-449c-8874-c8f7ed1cb657 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2580.181143] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7db8609-a447-4983-9d6d-071f9e1a86b5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.189263] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d169076d-59b6-44b3-a641-d4da98392aee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.220487] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091679e7-67ef-4957-9cbf-38e71e9a2b85 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.228285] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eafde36-bb77-4f30-9024-c05c19de6a02 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.242513] env[62875]: DEBUG nova.compute.provider_tree [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2580.374421] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2580.705794] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2580.706102] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2580.745425] env[62875]: DEBUG nova.scheduler.client.report [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2581.250796] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2581.253340] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.879s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2581.253584] env[62875]: DEBUG nova.objects.instance [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lazy-loading 'resources' on Instance uuid 760f10ab-4617-418b-b922-4f9835eb96f4 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2581.267344] env[62875]: INFO nova.scheduler.client.report [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Deleted allocations for instance b71c432b-80ec-4b08-a62a-b1d5ccc56f86 [ 2581.624339] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2581.624578] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2581.774634] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2b0d5082-1202-4321-965c-d484fd6e8409 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "b71c432b-80ec-4b08-a62a-b1d5ccc56f86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.054s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2581.816201] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63880d3e-df95-41d7-9ef9-1666244b38ff {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.824837] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bdcbfb-8942-4ee6-8cc2-88b8a6534986 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.856308] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29205ac5-2876-4c9c-ba58-8572fef8e4a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.863466] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6e30e5-14c6-4cd5-ad4f-aec70c555b7f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.876490] env[62875]: DEBUG nova.compute.provider_tree [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2582.126731] env[62875]: DEBUG nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2582.380046] env[62875]: DEBUG nova.scheduler.client.report [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2582.649280] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2582.885409] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.632s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2582.887750] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.239s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2582.889285] env[62875]: INFO nova.compute.claims [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2582.902352] env[62875]: INFO nova.scheduler.client.report [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Deleted allocations for instance 760f10ab-4617-418b-b922-4f9835eb96f4 [ 2583.408892] env[62875]: DEBUG oslo_concurrency.lockutils [None req-2054213a-d070-428b-b345-8506ce6745b3 tempest-ServerActionsV293TestJSON-695969953 tempest-ServerActionsV293TestJSON-695969953-project-member] Lock "760f10ab-4617-418b-b922-4f9835eb96f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.591s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2583.805648] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2583.805966] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2583.963192] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b182d00c-ffd8-4efd-8135-2577a88e6cef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2583.971187] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f32fdc-b5ad-4779-83e7-373335c48e71 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.003765] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02025740-bfd3-4713-9c36-40b0def98f1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.011432] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd11d1b5-78b2-414a-b3df-b4b7122665f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.024382] env[62875]: DEBUG nova.compute.provider_tree [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed in ProviderTree for 
provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2584.309365] env[62875]: DEBUG nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2584.531027] env[62875]: DEBUG nova.scheduler.client.report [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2584.831255] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2585.036031] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2585.036031] env[62875]: DEBUG nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Start building networks asynchronously for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2585.037532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.207s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2585.039415] env[62875]: INFO nova.compute.claims [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2585.544051] env[62875]: DEBUG nova.compute.utils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2585.547441] env[62875]: DEBUG nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2585.547610] env[62875]: DEBUG nova.network.neutron [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2585.590490] env[62875]: DEBUG nova.policy [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '791a39d3328040d5aa1140485a997d43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b93283112aad44f4833c1cc017a566db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2585.707091] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2585.707354] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2585.856237] env[62875]: DEBUG nova.network.neutron [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Successfully created port: 4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
2586.048662] env[62875]: DEBUG nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2586.127716] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385187e8-65e3-4a65-af8d-8d4676dc0477 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2586.135048] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7717a2db-1e6e-4993-93f1-65537b00eae1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2586.167174] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad93e27-bc20-421a-ba8a-025ced18925b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2586.174780] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5335954-8d64-4bb0-af43-3c0f2d89200c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2586.188093] env[62875]: DEBUG nova.compute.provider_tree [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2586.691219] env[62875]: DEBUG nova.scheduler.client.report [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2586.740274] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2586.740466] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2586.740616] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2587.064316] env[62875]: DEBUG nova.compute.manager [None 
req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2587.091234] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2587.091486] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2587.091642] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2587.091826] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2587.091973] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2587.092137] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2587.092347] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2587.092506] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 
tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2587.092674] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2587.092838] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2587.093017] env[62875]: DEBUG nova.virt.hardware [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2587.093876] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b29394-6a0d-41c0-86c5-da4433d17caa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2587.102439] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f561f963-effd-4b7f-bcf4-6a076f0e86dc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2587.196565] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2587.197141] env[62875]: DEBUG nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2587.702529] env[62875]: DEBUG nova.compute.utils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2587.704218] env[62875]: DEBUG nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2587.704444] env[62875]: DEBUG nova.network.neutron [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2587.760743] env[62875]: DEBUG nova.policy [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17f234bbbbc240d190e3dc9fd65b21ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e9bffbf1f46465286d8bc197f4b8c92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2587.989508] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [{"id": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "address": "fa:16:3e:9e:0c:34", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap473d8504-6b", "ovs_interfaceid": "473d8504-6b5c-40ab-becc-e8cc4c9ea748", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2588.054959] env[62875]: DEBUG nova.network.neutron [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Successfully created port: 70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2588.208034] env[62875]: DEBUG nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Start building block device mappings for instance. 
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2588.301241] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "894b9113-47ae-4b50-ae42-682be81324ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2588.301478] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "894b9113-47ae-4b50-ae42-682be81324ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2588.492956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-a5f9b278-6c02-4d5e-997a-97a8fa8944ca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2588.492956] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2588.492956] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2588.803933] env[62875]: DEBUG nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2588.999028] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2588.999028] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2588.999028] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2588.999028] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2588.999028] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d720ae5f-cf0c-4be7-9384-f89e27789990 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.008335] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1542011-d169-421e-8e6f-1e60114e807b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.023377] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7928a5-fd55-4430-8e64-8d72d0e65673 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.029658] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794b170b-ab2d-4ca0-950c-0c4d2bedabb6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.058837] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180142MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2589.059026] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2589.059267] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2589.218590] env[62875]: DEBUG nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2589.246555] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2589.246790] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2589.246948] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2589.247194] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2589.247367] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2589.247517] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2589.247719] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2589.247876] env[62875]: DEBUG 
nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2589.248054] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2589.248221] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2589.248397] env[62875]: DEBUG nova.virt.hardware [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2589.249250] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a71732-71c1-4edf-ad5b-e74411b730ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.257411] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead23c0e-3391-4ca9-bf2e-4b766e608109 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.324906] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2590.083104] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.083359] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a9cc9da5-b40c-492d-92a5-85e760290be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.083403] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 0ea0366f-3383-4da0-abf4-c8cbfa199809 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
[ 2589.249250] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a71732-71c1-4edf-ad5b-e74411b730ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.257411] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead23c0e-3391-4ca9-bf2e-4b766e608109 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2589.324906] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2590.083104] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.083359] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a9cc9da5-b40c-492d-92a5-85e760290be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.083403] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 0ea0366f-3383-4da0-abf4-c8cbfa199809 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.083507] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 467b53e0-1614-4624-841d-1310271825bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.586930] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 894b9113-47ae-4b50-ae42-682be81324ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2590.587199] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2590.587350] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2590.648624] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cc7179-6b01-4ea8-9003-b894d2d02212 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.656200] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed821e7-fd16-4aa5-ba71-2b4b602bb3ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.686799] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e52608-1395-4f1a-9783-9b46d3c53abc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.693943] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef40700-ef2a-4b42-87d8-26923fbeb9b7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.706600] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2591.209994] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400,
'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2591.263541] env[62875]: DEBUG nova.compute.manager [req-59cf1807-ee7d-47ee-bb02-b3f922c96304 req-99c4b2af-8f9f-4350-8f4e-0e77bf9ec931 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Received event network-vif-plugged-4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2591.263768] env[62875]: DEBUG oslo_concurrency.lockutils [req-59cf1807-ee7d-47ee-bb02-b3f922c96304 req-99c4b2af-8f9f-4350-8f4e-0e77bf9ec931 service nova] Acquiring lock "0ea0366f-3383-4da0-abf4-c8cbfa199809-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2591.264070] env[62875]: DEBUG oslo_concurrency.lockutils [req-59cf1807-ee7d-47ee-bb02-b3f922c96304 req-99c4b2af-8f9f-4350-8f4e-0e77bf9ec931 service nova] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2591.264192] env[62875]: DEBUG oslo_concurrency.lockutils [req-59cf1807-ee7d-47ee-bb02-b3f922c96304 req-99c4b2af-8f9f-4350-8f4e-0e77bf9ec931 service nova] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2591.264430] env[62875]: DEBUG nova.compute.manager [req-59cf1807-ee7d-47ee-bb02-b3f922c96304 req-99c4b2af-8f9f-4350-8f4e-0e77bf9ec931 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] No waiting events found dispatching network-vif-plugged-4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2591.264558] env[62875]: WARNING nova.compute.manager [req-59cf1807-ee7d-47ee-bb02-b3f922c96304 req-99c4b2af-8f9f-4350-8f4e-0e77bf9ec931 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Received unexpected event network-vif-plugged-4d47d085-7a43-409d-a8ef-700e94f5ea06 for instance with vm_state building and task_state spawning.
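
The lockutils records above capture the external-event handshake between Neutron and nova-compute: the network-vif-plugged event for port 4d47d085 arrives, the per-instance "-events" lock is taken and released in under a millisecond, no registered waiter is found because the instance is still spawning, and the event is logged as unexpected. Below is a toy model of that waiter registry, assuming a plain threading.Lock and hypothetical names; it is not Nova's InstanceEvents implementation, which keys events per instance inside its own concurrency plumbing.

import threading

class EventRegistry:
    """Toy waiter/dispatcher registry mirroring the handshake logged above."""

    def __init__(self) -> None:
        self._lock = threading.Lock()  # cf. the per-instance "-events" lock
        self._waiters: dict[str, threading.Event] = {}

    def prepare(self, name: str) -> threading.Event:
        # The spawning path registers interest before triggering the work.
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def pop(self, name: str) -> bool:
        # The external-event handler pops a waiter, as _pop_event does.
        with self._lock:
            waiter = self._waiters.pop(name, None)
        if waiter is None:
            # Nothing was waiting yet: the "Received unexpected event" case.
            print(f"WARNING: received unexpected event {name}")
            return False
        waiter.set()  # wake the thread blocked on this event
        return True

registry = EventRegistry()
registry.pop("network-vif-plugged-4d47d085")  # no waiter -> unexpected event
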
[ 2591.402941] env[62875]: DEBUG nova.network.neutron [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Successfully updated port: 4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2591.567831] env[62875]: DEBUG nova.network.neutron [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Successfully updated port: 70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2591.714766] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2591.714965] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.656s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2591.715318] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.390s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2591.716995] env[62875]: INFO nova.compute.claims [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2591.905661] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2591.905838] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2591.905982] env[62875]: DEBUG nova.network.neutron [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2592.070577] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock 
"refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2592.070740] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2592.070935] env[62875]: DEBUG nova.network.neutron [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2592.436802] env[62875]: DEBUG nova.network.neutron [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2592.558591] env[62875]: DEBUG nova.network.neutron [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updating instance_info_cache with network_info: [{"id": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "address": "fa:16:3e:a6:4a:f8", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d47d085-7a", "ovs_interfaceid": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2592.601417] env[62875]: DEBUG nova.network.neutron [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2592.724582] env[62875]: DEBUG nova.network.neutron [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updating instance_info_cache with network_info: [{"id": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "address": "fa:16:3e:b3:f4:16", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b54fc5-74", "ovs_interfaceid": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2592.792180] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b277a7d-e3c2-405a-bdb3-fb6d7ba32691 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.800546] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3df0588-14f6-41ef-8772-d0e6ee795b12 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.834052] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e50bf5-9a03-4560-a546-4fb33f91fd5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.841405] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3300ad4d-a212-4795-b87f-97ca0a902328 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.854140] env[62875]: DEBUG nova.compute.provider_tree [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2593.061841] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2593.062201] env[62875]: DEBUG 
nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Instance network_info: |[{"id": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "address": "fa:16:3e:a6:4a:f8", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d47d085-7a", "ovs_interfaceid": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2593.062673] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:4a:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d47d085-7a43-409d-a8ef-700e94f5ea06', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2593.070279] env[62875]: DEBUG oslo.service.loopingcall [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2593.070487] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2593.070708] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85c6a04e-ade1-4ec3-a945-51c1c4797364 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2593.090283] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2593.090283] env[62875]: value = "task-2180712" [ 2593.090283] env[62875]: _type = "Task" [ 2593.090283] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2593.097700] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180712, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2593.228957] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Releasing lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2593.229325] env[62875]: DEBUG nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Instance network_info: |[{"id": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "address": "fa:16:3e:b3:f4:16", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b54fc5-74", "ovs_interfaceid": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2593.229751] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:f4:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70b54fc5-7469-4d04-87af-5fe0aaca4e9e', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2593.237180] env[62875]: DEBUG oslo.service.loopingcall [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2593.237398] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 467b53e0-1614-4624-841d-1310271825bc] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2593.237620] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90f6212a-f4e3-4cc4-88f7-ffeffa03bb7f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2593.257968] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2593.257968] env[62875]: value = "task-2180713" [ 2593.257968] env[62875]: _type = "Task" [ 2593.257968] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2593.265709] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180713, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2593.292900] env[62875]: DEBUG nova.compute.manager [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Received event network-changed-4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2593.293277] env[62875]: DEBUG nova.compute.manager [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Refreshing instance network info cache due to event network-changed-4d47d085-7a43-409d-a8ef-700e94f5ea06. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2593.293547] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Acquiring lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2593.293713] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Acquired lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2593.293913] env[62875]: DEBUG nova.network.neutron [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Refreshing network info cache for port 4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2593.357285] env[62875]: DEBUG nova.scheduler.client.report [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2593.601590] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180712, 'name': CreateVM_Task, 'duration_secs': 0.508031} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2593.602142] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2593.602512] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2593.602712] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2593.603160] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2593.603411] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa752c9c-4fb8-4847-981a-e929087a420a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2593.608164] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2593.608164] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5263d3fd-f427-6a52-71c0-b98dccad14d0" [ 2593.608164] env[62875]: _type = "Task" [ 2593.608164] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2593.615388] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5263d3fd-f427-6a52-71c0-b98dccad14d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2593.767877] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180713, 'name': CreateVM_Task, 'duration_secs': 0.391035} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2593.768048] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 467b53e0-1614-4624-841d-1310271825bc] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2593.768747] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2593.862351] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2593.862844] env[62875]: DEBUG nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2593.983818] env[62875]: DEBUG nova.network.neutron [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updated VIF entry in instance network info cache for port 4d47d085-7a43-409d-a8ef-700e94f5ea06. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2593.984197] env[62875]: DEBUG nova.network.neutron [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updating instance_info_cache with network_info: [{"id": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "address": "fa:16:3e:a6:4a:f8", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d47d085-7a", "ovs_interfaceid": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2594.119979] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5263d3fd-f427-6a52-71c0-b98dccad14d0, 'name': SearchDatastore_Task, 'duration_secs': 0.01234} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2594.119979] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2594.119979] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2594.119979] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2594.119979] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2594.119979] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2594.119979] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2594.120367] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2594.120554] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c2fa692-fabf-47a1-bb7f-68a25efec863 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.122381] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-892e8afb-3d7c-4a0c-a45e-1ee31c6e2595 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.128849] env[62875]: DEBUG 
oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2594.128849] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52652f7b-f119-9499-a3d8-82e5f3f35685" [ 2594.128849] env[62875]: _type = "Task" [ 2594.128849] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.135535] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2594.135818] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2594.137400] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66536ba-e0e0-4cde-b19d-1a5b5ce4f01a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.146288] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52652f7b-f119-9499-a3d8-82e5f3f35685, 'name': SearchDatastore_Task, 'duration_secs': 0.008901} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2594.147230] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2594.147510] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2594.147840] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2594.149822] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2594.149822] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5262c521-3714-32d9-2cb8-d4f5d79c85e1" [ 2594.149822] env[62875]: _type = "Task" [ 2594.149822] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.161982] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5262c521-3714-32d9-2cb8-d4f5d79c85e1, 'name': SearchDatastore_Task, 'duration_secs': 0.008333} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2594.163151] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d03251f4-6945-423d-aa36-f4f237f35f21 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.169822] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2594.169822] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52880268-b584-0079-bae0-fb8829bd07dd" [ 2594.169822] env[62875]: _type = "Task" [ 2594.169822] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.182287] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52880268-b584-0079-bae0-fb8829bd07dd, 'name': SearchDatastore_Task, 'duration_secs': 0.008424} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2594.182668] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2594.183062] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 0ea0366f-3383-4da0-abf4-c8cbfa199809/0ea0366f-3383-4da0-abf4-c8cbfa199809.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2594.183482] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2594.183771] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2594.184086] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3288f047-71d5-49bf-adb1-bec1c170e853 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.187139] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7b53138-fc6b-422a-b84f-7e9c23895a4d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.194617] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2594.194617] env[62875]: value = "task-2180714" [ 2594.194617] env[62875]: _type = "Task" [ 2594.194617] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.200644] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2594.200982] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2594.202247] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0ce0124-eb57-4bb8-8591-877bfe6a971b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.211599] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2594.211599] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52329806-028f-82b4-1656-afb324e34bb4" [ 2594.211599] env[62875]: _type = "Task" [ 2594.211599] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.216275] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2594.228481] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52329806-028f-82b4-1656-afb324e34bb4, 'name': SearchDatastore_Task, 'duration_secs': 0.007123} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2594.229720] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fe4b6ac-d986-471e-88c0-d6e29d888ecd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.236607] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2594.236607] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527cdbfc-495a-12de-91d4-18e3a1378131" [ 2594.236607] env[62875]: _type = "Task" [ 2594.236607] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.249128] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527cdbfc-495a-12de-91d4-18e3a1378131, 'name': SearchDatastore_Task, 'duration_secs': 0.008366} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2594.249551] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2594.249988] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 467b53e0-1614-4624-841d-1310271825bc/467b53e0-1614-4624-841d-1310271825bc.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2594.250377] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94b2b515-ae40-4eb7-a58e-4f55503f3f3a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.258336] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2594.258336] env[62875]: value = "task-2180715" [ 2594.258336] env[62875]: _type = "Task" [ 2594.258336] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.271202] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2594.368847] env[62875]: DEBUG nova.compute.utils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2594.371227] env[62875]: DEBUG nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2594.371539] env[62875]: DEBUG nova.network.neutron [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2594.416967] env[62875]: DEBUG nova.policy [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '031eae8d1cb04060b1aa46aad5638454', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '120372b8f4fc452fb0cdf362fbf8431a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2594.486924] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Releasing lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2594.487432] env[62875]: DEBUG nova.compute.manager [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Received event network-vif-plugged-70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2594.487702] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2594.487924] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Lock "467b53e0-1614-4624-841d-1310271825bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2594.488110] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Lock "467b53e0-1614-4624-841d-1310271825bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2594.488337] env[62875]: DEBUG nova.compute.manager [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] No waiting events found dispatching network-vif-plugged-70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2594.488512] env[62875]: WARNING nova.compute.manager [req-a293de3c-612a-4e5b-a791-04f4c167e89d
req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Received unexpected event network-vif-plugged-70b54fc5-7469-4d04-87af-5fe0aaca4e9e for instance with vm_state building and task_state spawning. [ 2594.488683] env[62875]: DEBUG nova.compute.manager [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Received event network-changed-70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2594.488829] env[62875]: DEBUG nova.compute.manager [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Refreshing instance network info cache due to event network-changed-70b54fc5-7469-4d04-87af-5fe0aaca4e9e. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2594.489087] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Acquiring lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2594.489243] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Acquired lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2594.489409] env[62875]: DEBUG nova.network.neutron [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Refreshing network info cache for port 70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2594.708359] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482507} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2594.708359] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 0ea0366f-3383-4da0-abf4-c8cbfa199809/0ea0366f-3383-4da0-abf4-c8cbfa199809.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2594.708359] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2594.708359] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65f2866a-6b90-4170-a8d7-8f37a74f8661 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.717128] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2594.717128] env[62875]: value = "task-2180716" [ 2594.717128] env[62875]: _type = "Task" [ 2594.717128] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.728943] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180716, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2594.769040] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180715, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2594.770415] env[62875]: DEBUG nova.network.neutron [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Successfully created port: 3f88b147-99aa-4953-a073-0c0f9081e907 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2594.876689] env[62875]: DEBUG nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Start building block device mappings for instance. 
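Every vCenter operation in this run (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, and so on) is asynchronous: wait_for_task submits the call and _poll_task re-reads the task object until it reaches a terminal state, which is where the "progress is N%" and "completed successfully ... duration_secs" lines come from. Below is a condensed sketch of such a polling loop, assuming a get_task_info callable standing in for the PropertyCollector round-trip; oslo.vmware's real wait_for_task adds retry cadence and fault translation on top of this.

```python
import time

def wait_for_task(get_task_info, interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state."""
    while True:
        info = get_task_info()               # stands in for one PropertyCollector read
        if info["state"] == "running":
            print(f"Task {info['id']} progress is {info.get('progress', 0)}%.")
        elif info["state"] == "success":
            print(f"Task {info['id']} completed successfully.")
            return info
        else:
            raise RuntimeError(f"Task {info['id']} ended in state {info['state']}")
        time.sleep(interval)

# Simulated task that is polled once while running, then finishes.
states = iter([{"id": "task-1", "state": "running", "progress": 4},
               {"id": "task-1", "state": "success"}])
wait_for_task(lambda: next(states), interval=0)
```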
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2595.227576] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124377} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2595.227856] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2595.228668] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509469cc-4c36-477c-8579-eafcb8ec38e8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.250972] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 0ea0366f-3383-4da0-abf4-c8cbfa199809/0ea0366f-3383-4da0-abf4-c8cbfa199809.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2595.253900] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d536bf8f-1a2e-4e69-b73a-a8e72ecb0a29 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.277865] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180715, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.908836} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2595.279191] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 467b53e0-1614-4624-841d-1310271825bc/467b53e0-1614-4624-841d-1310271825bc.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2595.279405] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2595.279718] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2595.279718] env[62875]: value = "task-2180717" [ 2595.279718] env[62875]: _type = "Task" [ 2595.279718] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2595.280076] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-559ac53e-c9a3-4323-9d05-a98a4ea62fc2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.292190] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180717, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2595.293516] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2595.293516] env[62875]: value = "task-2180718" [ 2595.293516] env[62875]: _type = "Task" [ 2595.293516] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2595.303928] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180718, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2595.382057] env[62875]: DEBUG nova.network.neutron [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updated VIF entry in instance network info cache for port 70b54fc5-7469-4d04-87af-5fe0aaca4e9e. 
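The cache refresh triggered by the network-changed event runs under an oslo.concurrency lock named refresh_cache-<instance uuid>, which is what produces the Acquiring/Acquired/Releasing triplets bracketing it. A minimal usage sketch: lockutils.lock is the real oslo.concurrency context manager, while the fetch callable and cache dict here are stand-ins for Nova's _get_instance_nw_info machinery.

```python
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, fetch_from_neutron, cache):
    # Serializes concurrent refreshes of one instance's network info;
    # emits the same Acquiring/Acquired/Releasing DEBUG lines seen above.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        cache[instance_uuid] = fetch_from_neutron(instance_uuid)
        return cache[instance_uuid]
```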
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2595.382437] env[62875]: DEBUG nova.network.neutron [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updating instance_info_cache with network_info: [{"id": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "address": "fa:16:3e:b3:f4:16", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b54fc5-74", "ovs_interfaceid": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2595.791837] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2595.801754] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180718, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.3442} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2595.802045] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2595.802816] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6dac44-8a20-4a44-a5be-3769515b0e35 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.826305] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 467b53e0-1614-4624-841d-1310271825bc/467b53e0-1614-4624-841d-1310271825bc.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2595.826644] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfa24f60-69f8-472b-acae-7381a365d1e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.846662] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2595.846662] env[62875]: value = "task-2180719" [ 2595.846662] env[62875]: _type = "Task" [ 2595.846662] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2595.854844] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180719, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2595.888605] env[62875]: DEBUG oslo_concurrency.lockutils [req-a293de3c-612a-4e5b-a791-04f4c167e89d req-d7cb1eac-859d-4d04-9e52-5afb4a02e2f3 service nova] Releasing lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2595.892070] env[62875]: DEBUG nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Start spawning the instance on the hypervisor. 
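For a sparse image the spawn path runs the fixed three-step disk sequence visible above: copy the cached VMDK into the instance directory, extend the copy to the flavor's root disk size (the logged 1048576 is consistent with 1 GiB expressed in KB, matching this flavor's root_gb of 1), then attach it with a ReconfigVM_Task. The sketch below is schematic: invoke_api and wait_for_task are real oslo.vmware session entry points, but the argument shapes are abbreviated assumptions, since the real code builds full spec objects through the client factory.

```python
def provision_root_disk(session, dc_ref, cached_vmdk, instance_vmdk, size_kb):
    disk_mgr = session.vim.service_content.virtualDiskManager

    # 1. CopyVirtualDisk_Task: materialize the image-cache copy for the instance.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                              destName=instance_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)

    # 2. ExtendVirtualDisk_Task: grow the sparse root disk to the flavor's
    #    root_gb (1048576 KB == 1 GiB in this run).
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=instance_vmdk, datacenter=dc_ref,
                              newCapacityKb=size_kb, eagerZero=False)
    session.wait_for_task(task)
    # 3. A ReconfigVM_Task then attaches instance_vmdk to the VM (not shown).
```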
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2595.920287] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2595.920615] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2595.920837] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2595.921095] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2595.921322] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2595.921534] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2595.921800] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2595.922049] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2595.922293] env[62875]: DEBUG nova.virt.hardware [None 
req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2595.922510] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2595.922752] env[62875]: DEBUG nova.virt.hardware [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2595.923777] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1327a0fc-1067-4740-917c-0e7f8d08956f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.932694] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b045c028-d215-43e7-ba0e-e40f6f769e70 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2596.292223] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180717, 'name': ReconfigVM_Task, 'duration_secs': 0.668204} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2596.293332] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 0ea0366f-3383-4da0-abf4-c8cbfa199809/0ea0366f-3383-4da0-abf4-c8cbfa199809.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2596.293332] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8cddbeb-56af-43f8-a846-157e229213f1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2596.299170] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2596.299170] env[62875]: value = "task-2180720" [ 2596.299170] env[62875]: _type = "Task" [ 2596.299170] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2596.307697] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180720, 'name': Rename_Task} progress is 5%. 
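The nova.virt.hardware lines above enumerate CPU topologies: with no flavor or image limits set (the 0:0:0 lines), the ceilings default to 65536, and the code factors the vCPU count into sockets * cores * threads, which for a single vCPU leaves exactly VirtCPUTopology(cores=1,sockets=1,threads=1). A self-contained sketch of that enumeration, illustrative rather than Nova's actual implementation:

```python
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) factorizations of vcpus under the limits."""
    found = []
    for s, c in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                  range(1, min(vcpus, max_cores) + 1)):
        if vcpus % (s * c) == 0:
            t = vcpus // (s * c)
            if t <= max_threads:
                found.append((s, c, t))
    return found

print(possible_topologies(1))   # -> [(1, 1, 1)], matching the log above
```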
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2596.357919] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180719, 'name': ReconfigVM_Task, 'duration_secs': 0.269994} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2596.358234] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 467b53e0-1614-4624-841d-1310271825bc/467b53e0-1614-4624-841d-1310271825bc.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2596.359246] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e8cc605-e70b-4399-8a69-670659300148 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2596.366577] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2596.366577] env[62875]: value = "task-2180721" [ 2596.366577] env[62875]: _type = "Task" [ 2596.366577] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2596.375812] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180721, 'name': Rename_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2596.441859] env[62875]: DEBUG nova.compute.manager [req-5c0eff4c-3089-4259-9cc2-cdd46f2b0d64 req-9eb5a28f-bd76-488b-bedb-2a3ab2e6773d service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Received event network-vif-plugged-3f88b147-99aa-4953-a073-0c0f9081e907 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2596.442136] env[62875]: DEBUG oslo_concurrency.lockutils [req-5c0eff4c-3089-4259-9cc2-cdd46f2b0d64 req-9eb5a28f-bd76-488b-bedb-2a3ab2e6773d service nova] Acquiring lock "894b9113-47ae-4b50-ae42-682be81324ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2596.442412] env[62875]: DEBUG oslo_concurrency.lockutils [req-5c0eff4c-3089-4259-9cc2-cdd46f2b0d64 req-9eb5a28f-bd76-488b-bedb-2a3ab2e6773d service nova] Lock "894b9113-47ae-4b50-ae42-682be81324ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2596.442647] env[62875]: DEBUG oslo_concurrency.lockutils [req-5c0eff4c-3089-4259-9cc2-cdd46f2b0d64 req-9eb5a28f-bd76-488b-bedb-2a3ab2e6773d service nova] Lock "894b9113-47ae-4b50-ae42-682be81324ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2596.442839] env[62875]: DEBUG nova.compute.manager [req-5c0eff4c-3089-4259-9cc2-cdd46f2b0d64 req-9eb5a28f-bd76-488b-bedb-2a3ab2e6773d service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] No waiting events found dispatching network-vif-plugged-3f88b147-99aa-4953-a073-0c0f9081e907 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2596.443100] env[62875]: WARNING nova.compute.manager [req-5c0eff4c-3089-4259-9cc2-cdd46f2b0d64 req-9eb5a28f-bd76-488b-bedb-2a3ab2e6773d service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Received unexpected event network-vif-plugged-3f88b147-99aa-4953-a073-0c0f9081e907 for instance with vm_state building and task_state spawning. [ 2596.553132] env[62875]: DEBUG nova.network.neutron [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Successfully updated port: 3f88b147-99aa-4953-a073-0c0f9081e907 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2596.810556] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180720, 'name': Rename_Task, 'duration_secs': 0.135658} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2596.810830] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2596.811060] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1fd4ed8-1e71-4063-a98b-bdeb3dff4096 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2596.817565] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2596.817565] env[62875]: value = "task-2180722" [ 2596.817565] env[62875]: _type = "Task" [ 2596.817565] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2596.824880] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180722, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2596.875619] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180721, 'name': Rename_Task, 'duration_secs': 0.138589} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2596.875870] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2596.876122] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dd7f550-c465-4938-ac96-46dbd485753d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2596.882537] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2596.882537] env[62875]: value = "task-2180723" [ 2596.882537] env[62875]: _type = "Task" [ 2596.882537] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2596.890089] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180723, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2597.053860] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2597.054068] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2597.054214] env[62875]: DEBUG nova.network.neutron [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2597.328030] env[62875]: DEBUG oslo_vmware.api [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180722, 'name': PowerOnVM_Task, 'duration_secs': 0.422079} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2597.328258] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2597.328440] env[62875]: INFO nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Took 10.26 seconds to spawn the instance on the hypervisor. [ 2597.328628] env[62875]: DEBUG nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2597.329427] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631168c1-13c3-4f3b-8697-7a33acd9feda {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2597.392297] env[62875]: DEBUG oslo_vmware.api [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180723, 'name': PowerOnVM_Task, 'duration_secs': 0.429162} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2597.392569] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2597.392771] env[62875]: INFO nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Took 8.17 seconds to spawn the instance on the hypervisor. [ 2597.392948] env[62875]: DEBUG nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2597.393702] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18af61f-3949-4af2-842a-8e3c77deafef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2597.583731] env[62875]: DEBUG nova.network.neutron [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2597.714432] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2597.778480] env[62875]: DEBUG nova.network.neutron [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Updating instance_info_cache with network_info: [{"id": "3f88b147-99aa-4953-a073-0c0f9081e907", "address": "fa:16:3e:d2:84:96", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f88b147-99", "ovs_interfaceid": "3f88b147-99aa-4953-a073-0c0f9081e907", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2597.847487] env[62875]: INFO nova.compute.manager [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Took 15.22 seconds to build instance. [ 2597.909236] env[62875]: INFO nova.compute.manager [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Took 13.09 seconds to build instance. [ 2598.130517] env[62875]: DEBUG nova.compute.manager [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Received event network-changed-70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2598.130863] env[62875]: DEBUG nova.compute.manager [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Refreshing instance network info cache due to event network-changed-70b54fc5-7469-4d04-87af-5fe0aaca4e9e. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2598.130943] env[62875]: DEBUG oslo_concurrency.lockutils [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] Acquiring lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2598.131099] env[62875]: DEBUG oslo_concurrency.lockutils [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] Acquired lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2598.131262] env[62875]: DEBUG nova.network.neutron [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Refreshing network info cache for port 70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2598.282024] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2598.282024] env[62875]: DEBUG nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Instance network_info: |[{"id": "3f88b147-99aa-4953-a073-0c0f9081e907", "address": "fa:16:3e:d2:84:96", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], 
"meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f88b147-99", "ovs_interfaceid": "3f88b147-99aa-4953-a073-0c0f9081e907", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2598.282349] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:84:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f88b147-99aa-4953-a073-0c0f9081e907', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2598.290748] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Creating folder: Project (120372b8f4fc452fb0cdf362fbf8431a). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2598.291063] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d36b76a5-c45e-4b6f-9143-cf37283c07d3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2598.304403] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Created folder: Project (120372b8f4fc452fb0cdf362fbf8431a) in parent group-v444854. [ 2598.304612] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Creating folder: Instances. Parent ref: group-v445021. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2598.304858] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74ab0e80-7512-4663-a984-39470ad8d6f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2598.314121] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Created folder: Instances in parent group-v445021. [ 2598.314472] env[62875]: DEBUG oslo.service.loopingcall [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2598.314681] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2598.315052] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3aada0c-ddd2-4388-9ffb-e623b1b4d911 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2598.336955] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2598.336955] env[62875]: value = "task-2180726" [ 2598.336955] env[62875]: _type = "Task" [ 2598.336955] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2598.344737] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180726, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2598.349210] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0ee0ae36-fb5c-47c9-9de5-86b4e6e761b9 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.725s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2598.410960] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d15ccc98-7777-4154-8909-c11f28b871ac tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.605s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2598.471680] env[62875]: DEBUG nova.compute.manager [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Received event network-changed-3f88b147-99aa-4953-a073-0c0f9081e907 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2598.471849] env[62875]: DEBUG nova.compute.manager [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Refreshing instance network info cache due to event network-changed-3f88b147-99aa-4953-a073-0c0f9081e907. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2598.472062] env[62875]: DEBUG oslo_concurrency.lockutils [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] Acquiring lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2598.472207] env[62875]: DEBUG oslo_concurrency.lockutils [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] Acquired lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2598.472376] env[62875]: DEBUG nova.network.neutron [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Refreshing network info cache for port 3f88b147-99aa-4953-a073-0c0f9081e907 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2598.846680] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180726, 'name': CreateVM_Task, 'duration_secs': 0.388445} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2598.846880] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2598.847526] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2598.847819] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2598.847995] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2598.848257] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0336a1a2-0c49-4481-8d3c-177c3363ecb6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2598.852947] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2598.852947] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d88f90-c4b6-21d7-9412-dc9aa61e2d2c" [ 2598.852947] env[62875]: _type = "Task" [ 2598.852947] env[62875]: } to complete. 
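Reuse of the image cache is guarded by a lock (and external semaphore) on the cached path plus a SearchDatastore_Task probe, as the lock and task lines directly above show: only after confirming the cached VMDK exists does the spawn path skip the download. A minimal stand-in for that double-checked pattern; lockutils.lock is the real oslo.concurrency context manager, while datastore_has and download are illustrative callables.

```python
from oslo_concurrency import lockutils

def fetch_image_if_missing(cache_path, datastore_has, download):
    # cache_path plays the role of e.g.
    # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk"
    with lockutils.lock(cache_path):
        if not datastore_has(cache_path):   # the SearchDatastore_Task round-trip
            download(cache_path)            # only one worker ever downloads
        return cache_path
```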
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2598.860337] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d88f90-c4b6-21d7-9412-dc9aa61e2d2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2598.861157] env[62875]: DEBUG nova.network.neutron [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updated VIF entry in instance network info cache for port 70b54fc5-7469-4d04-87af-5fe0aaca4e9e. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2598.861497] env[62875]: DEBUG nova.network.neutron [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updating instance_info_cache with network_info: [{"id": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "address": "fa:16:3e:b3:f4:16", "network": {"id": "38266f56-647e-4be5-82cc-b3a63ee6e909", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1848349969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e9bffbf1f46465286d8bc197f4b8c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b54fc5-74", "ovs_interfaceid": "70b54fc5-7469-4d04-87af-5fe0aaca4e9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2599.158413] env[62875]: DEBUG nova.network.neutron [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Updated VIF entry in instance network info cache for port 3f88b147-99aa-4953-a073-0c0f9081e907. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2599.158780] env[62875]: DEBUG nova.network.neutron [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Updating instance_info_cache with network_info: [{"id": "3f88b147-99aa-4953-a073-0c0f9081e907", "address": "fa:16:3e:d2:84:96", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f88b147-99", "ovs_interfaceid": "3f88b147-99aa-4953-a073-0c0f9081e907", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2599.363478] env[62875]: DEBUG oslo_concurrency.lockutils [req-0fb2484f-2350-4df9-b9ce-c2240414e27d req-8d982371-e17e-49c1-98e0-9f14410fc77b service nova] Releasing lock "refresh_cache-467b53e0-1614-4624-841d-1310271825bc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2599.363875] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d88f90-c4b6-21d7-9412-dc9aa61e2d2c, 'name': SearchDatastore_Task, 'duration_secs': 0.009232} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2599.364153] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2599.364374] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2599.364603] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2599.364749] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2599.364926] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2599.365188] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b418f61a-48db-408d-8252-d89ef599a490 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2599.373338] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2599.373511] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2599.374195] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38ea6a8a-ffb4-4ea6-b05b-93f8d879c9a8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2599.379234] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2599.379234] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4784-f72b-b533-5eb4-e5fb477725c8" [ 2599.379234] env[62875]: _type = "Task" [ 2599.379234] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2599.386356] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4784-f72b-b533-5eb4-e5fb477725c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2599.661367] env[62875]: DEBUG oslo_concurrency.lockutils [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] Releasing lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2599.661590] env[62875]: DEBUG nova.compute.manager [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Received event network-changed-4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2599.661762] env[62875]: DEBUG nova.compute.manager [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Refreshing instance network info cache due to event network-changed-4d47d085-7a43-409d-a8ef-700e94f5ea06. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2599.661976] env[62875]: DEBUG oslo_concurrency.lockutils [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] Acquiring lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2599.662135] env[62875]: DEBUG oslo_concurrency.lockutils [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] Acquired lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2599.662302] env[62875]: DEBUG nova.network.neutron [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Refreshing network info cache for port 4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2599.890758] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4784-f72b-b533-5eb4-e5fb477725c8, 'name': SearchDatastore_Task, 'duration_secs': 0.007662} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2599.891622] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51fa6aed-3c37-424d-a35a-77ae91d150cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2599.897443] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2599.897443] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e827fc-3c6d-f4d0-f3c1-aa36cf90deaa" [ 2599.897443] env[62875]: _type = "Task" [ 2599.897443] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2599.905627] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e827fc-3c6d-f4d0-f3c1-aa36cf90deaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2600.372942] env[62875]: DEBUG nova.network.neutron [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updated VIF entry in instance network info cache for port 4d47d085-7a43-409d-a8ef-700e94f5ea06. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2600.373357] env[62875]: DEBUG nova.network.neutron [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updating instance_info_cache with network_info: [{"id": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "address": "fa:16:3e:a6:4a:f8", "network": {"id": "8561e33c-42c3-4c31-b091-599607069170", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1603218412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b93283112aad44f4833c1cc017a566db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d47d085-7a", "ovs_interfaceid": "4d47d085-7a43-409d-a8ef-700e94f5ea06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2600.409361] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e827fc-3c6d-f4d0-f3c1-aa36cf90deaa, 'name': SearchDatastore_Task, 'duration_secs': 0.00957} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2600.409639] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2600.409899] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/894b9113-47ae-4b50-ae42-682be81324ba.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2600.410186] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51cb20a1-eea8-417c-83c9-747dcc3009cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.416744] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2600.416744] env[62875]: value = "task-2180727" [ 2600.416744] env[62875]: _type = "Task" [ 2600.416744] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2600.424713] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180727, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2600.876042] env[62875]: DEBUG oslo_concurrency.lockutils [req-6060eb83-240e-4166-9542-00773e9c7526 req-4a8e5735-9fe2-4dbb-8f1e-beb4e953e748 service nova] Releasing lock "refresh_cache-0ea0366f-3383-4da0-abf4-c8cbfa199809" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2600.926878] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431965} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2600.927159] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/894b9113-47ae-4b50-ae42-682be81324ba.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2600.927383] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2600.927627] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2fc83d4-9939-40ac-a2d4-74c5ec229dc8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.934012] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2600.934012] env[62875]: value = "task-2180728" [ 2600.934012] env[62875]: _type = "Task" [ 2600.934012] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2600.941341] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180728, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2601.443642] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180728, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.410748} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2601.443859] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2601.444640] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fc58be-a1c1-4c43-a5ea-1adbfb12baf7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.466363] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/894b9113-47ae-4b50-ae42-682be81324ba.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2601.466598] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-825a8186-cc69-4a2e-8521-444d2f3e3856 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.485906] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2601.485906] env[62875]: value = "task-2180729" [ 2601.485906] env[62875]: _type = "Task" [ 2601.485906] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2601.493363] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180729, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2601.996797] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180729, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2602.496377] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180729, 'name': ReconfigVM_Task, 'duration_secs': 0.637369} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2602.496662] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/894b9113-47ae-4b50-ae42-682be81324ba.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2602.497288] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78448562-1f01-48b2-8d60-d6428d59feed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2602.503386] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2602.503386] env[62875]: value = "task-2180730" [ 2602.503386] env[62875]: _type = "Task" [ 2602.503386] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2602.510749] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180730, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2603.012791] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180730, 'name': Rename_Task, 'duration_secs': 0.134395} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2603.013165] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2603.013315] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1717add-27a7-425d-896b-09360dd28e72 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2603.019118] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2603.019118] env[62875]: value = "task-2180731" [ 2603.019118] env[62875]: _type = "Task" [ 2603.019118] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2603.026306] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180731, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2603.529380] env[62875]: DEBUG oslo_vmware.api [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180731, 'name': PowerOnVM_Task, 'duration_secs': 0.460965} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2603.529709] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2603.529957] env[62875]: INFO nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Took 7.64 seconds to spawn the instance on the hypervisor. [ 2603.530158] env[62875]: DEBUG nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2603.530906] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c375b559-47f6-4f91-8f4f-685643cf0397 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2604.047776] env[62875]: INFO nova.compute.manager [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Took 14.74 seconds to build instance. 
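The entries above trace the spawn sequence through the oslo.vmware task-polling pattern: each vSphere operation (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is invoked through the API session, after which wait_for_task blocks while _poll_task logs "progress is N%" until the task reports completed successfully. A minimal illustrative sketch of that pattern follows, assuming an already-connected VMwareAPISession and a VM managed-object reference obtained elsewhere (neither value is taken from this log):

from oslo_vmware import api

def power_on(session: api.VMwareAPISession, vm_ref):
    # invoke_api() issues the SOAP call (here VirtualMachine.PowerOnVM_Task)
    # and returns a task reference, mirroring the "Invoking ..._Task" lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task reference, logging progress until the
    # task completes successfully or raises on failure, as in the entries
    # above where PowerOnVM_Task goes from 0% to completed.
    return session.wait_for_task(task)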
[ 2604.550024] env[62875]: DEBUG oslo_concurrency.lockutils [None req-256ecac0-5797-4b48-9fd5-550df93ab231 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "894b9113-47ae-4b50-ae42-682be81324ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.248s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2604.783469] env[62875]: INFO nova.compute.manager [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Rescuing [ 2604.783764] env[62875]: DEBUG oslo_concurrency.lockutils [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2604.783925] env[62875]: DEBUG oslo_concurrency.lockutils [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2604.784105] env[62875]: DEBUG nova.network.neutron [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2605.532184] env[62875]: DEBUG nova.network.neutron [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Updating instance_info_cache with network_info: [{"id": "3f88b147-99aa-4953-a073-0c0f9081e907", "address": "fa:16:3e:d2:84:96", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f88b147-99", "ovs_interfaceid": "3f88b147-99aa-4953-a073-0c0f9081e907", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2606.034904] env[62875]: DEBUG oslo_concurrency.lockutils [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 
tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "refresh_cache-894b9113-47ae-4b50-ae42-682be81324ba" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2607.570748] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2607.571127] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb15f3bc-8de6-40c4-b618-5ed93b9cc61f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.578587] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2607.578587] env[62875]: value = "task-2180732" [ 2607.578587] env[62875]: _type = "Task" [ 2607.578587] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2607.586000] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2608.089374] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180732, 'name': PowerOffVM_Task, 'duration_secs': 0.188118} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2608.089374] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2608.090098] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecb3f9c-19d4-451d-b0c6-d533787c17cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.108084] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea235f4-6d23-46f4-96f1-4ddfd1c302b0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.133368] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2608.133621] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b2553c0-1a07-45d6-b51e-d04657b88aa2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.140125] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2608.140125] env[62875]: value = "task-2180733" [ 2608.140125] env[62875]: _type = "Task" [ 2608.140125] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2608.147303] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180733, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2608.652364] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] VM already powered off {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2608.652708] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2608.652833] env[62875]: DEBUG oslo_concurrency.lockutils [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2608.652955] env[62875]: DEBUG oslo_concurrency.lockutils [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2608.653144] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2608.653391] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc960a1a-8af3-4b75-9e61-9c6e702068c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.662394] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2608.662555] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2608.663221] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f2b23d8-2559-4ffa-a0fb-b8a015ac3f8d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.667843] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2608.667843] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5297c58c-5ef2-8821-e10c-408fdd92b972" [ 2608.667843] env[62875]: _type = "Task" [ 2608.667843] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2608.674991] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5297c58c-5ef2-8821-e10c-408fdd92b972, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2609.178141] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5297c58c-5ef2-8821-e10c-408fdd92b972, 'name': SearchDatastore_Task, 'duration_secs': 0.007813} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2609.178898] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac543218-ba1d-4c93-9cd6-1ac5664320cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.183628] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2609.183628] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52490823-6e84-f2c4-96d9-d23f3bb0ee94" [ 2609.183628] env[62875]: _type = "Task" [ 2609.183628] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2609.190724] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52490823-6e84-f2c4-96d9-d23f3bb0ee94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2609.693763] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52490823-6e84-f2c4-96d9-d23f3bb0ee94, 'name': SearchDatastore_Task, 'duration_secs': 0.00852} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2609.694219] env[62875]: DEBUG oslo_concurrency.lockutils [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2609.694302] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk. {{(pid=62875) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2609.694580] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78a8b92a-88ad-4eff-88ed-af4ee49dabac {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.701448] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2609.701448] env[62875]: value = "task-2180734" [ 2609.701448] env[62875]: _type = "Task" [ 2609.701448] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2609.708979] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2610.211608] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448395} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2610.211854] env[62875]: INFO nova.virt.vmwareapi.ds_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk. 
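The "Acquiring lock" / "Acquired lock" / "Releasing lock" bracketing around the cached image path above (lockutils.py:310/313/331) is oslo.concurrency serializing access to the image-cache VMDK while the rescue disk is copied from it. A minimal sketch of that bracketing, where the lock name echoes the datastore path from the log and the function and its argument are illustrative placeholders rather than Nova's actual helper:

from oslo_concurrency import lockutils

CACHE_VMDK = ('[datastore2] devstack-image-cache_base/'
              'a9637bcc-4de8-4ea1-be59-4c697becf2a7/'
              'a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk')

def copy_from_image_cache(do_copy):
    # Entering the context manager emits the "Acquiring"/"Acquired lock"
    # lines; leaving it emits "Releasing lock", matching the bracketing
    # visible around the CopyVirtualDisk_Task entries above.
    with lockutils.lock(CACHE_VMDK):
        do_copy()  # e.g. drive VirtualDiskManager.CopyVirtualDisk_Task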
[ 2610.212620] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cc7222-faa1-41d8-b203-794444da31ec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.236111] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2610.236352] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53861ede-0058-47b9-a69d-10d0004fd6e4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.253409] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2610.253409] env[62875]: value = "task-2180735" [ 2610.253409] env[62875]: _type = "Task" [ 2610.253409] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2610.260637] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180735, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2610.763248] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180735, 'name': ReconfigVM_Task, 'duration_secs': 0.310973} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2610.763603] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2610.764358] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37aed03-db83-4f78-94f3-68d424c613fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.788980] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43a2e473-1af5-4fb7-9d51-261e2d81c066 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.805335] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2610.805335] env[62875]: value = "task-2180736" [ 2610.805335] env[62875]: _type = "Task" [ 2610.805335] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2610.813314] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180736, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2611.315698] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180736, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2611.817837] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180736, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2612.317391] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180736, 'name': ReconfigVM_Task, 'duration_secs': 1.163105} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2612.317678] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2612.317913] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76341b75-e2eb-45af-96af-0fcce76244d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2612.323380] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2612.323380] env[62875]: value = "task-2180737" [ 2612.323380] env[62875]: _type = "Task" [ 2612.323380] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2612.331965] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180737, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2612.833393] env[62875]: DEBUG oslo_vmware.api [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180737, 'name': PowerOnVM_Task, 'duration_secs': 0.377486} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2612.833772] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2612.836434] env[62875]: DEBUG nova.compute.manager [None req-19b1e2f0-b981-4d7b-a55e-112cfa387739 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2612.837205] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b528fb5-92db-4bd8-a1a4-51a651802ec8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2614.142314] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2614.142314] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2614.644154] env[62875]: DEBUG nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Starting instance... 
{{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2615.168396] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2615.168663] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2615.170158] env[62875]: INFO nova.compute.claims [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2616.262848] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88437098-2ea1-44a8-b88f-398edc0e5a18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.270129] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0eb4675-b080-4886-89f2-dbeb5dd5cff0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.300296] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69bd75e-ae77-490c-9753-1bf985a38d68 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.307660] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fa4f80-fa53-4f5e-91ea-aa6235e3d4af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.320519] env[62875]: DEBUG nova.compute.provider_tree [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2616.839892] env[62875]: ERROR nova.scheduler.client.report [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [req-4840f4fc-4f4e-491c-bc02-3ad9b10674eb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4840f4fc-4f4e-491c-bc02-3ad9b10674eb"}]} [ 2616.855344] env[62875]: DEBUG nova.scheduler.client.report [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2616.866642] env[62875]: DEBUG nova.scheduler.client.report [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2616.866845] env[62875]: DEBUG nova.compute.provider_tree [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2616.876400] env[62875]: DEBUG nova.scheduler.client.report [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2616.892416] env[62875]: DEBUG nova.scheduler.client.report [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2616.959856] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c4a2ea-4921-42f7-9db4-e7b101921189 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.967309] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd86b18-7126-4678-974f-8a1dd73437e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.995609] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de407ee9-3e28-40f9-a350-724c4e031cd6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2617.002652] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71dd48b-48d5-4d68-ac9c-29a8bc9b4dc5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2617.016548] env[62875]: DEBUG nova.compute.provider_tree [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2617.545969] env[62875]: DEBUG nova.scheduler.client.report [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 129 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2617.546257] env[62875]: DEBUG nova.compute.provider_tree [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 129 to 130 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2617.546481] env[62875]: DEBUG nova.compute.provider_tree [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2618.051691] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.883s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2618.052232] env[62875]: DEBUG nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2618.558268] env[62875]: DEBUG nova.compute.utils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2618.559656] env[62875]: DEBUG nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2618.559877] env[62875]: DEBUG nova.network.neutron [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2618.608784] env[62875]: DEBUG nova.policy [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '031eae8d1cb04060b1aa46aad5638454', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '120372b8f4fc452fb0cdf362fbf8431a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2618.892448] env[62875]: DEBUG nova.network.neutron [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Successfully created port: d7159e37-51bd-4bf2-9b27-dbd2b9d29558 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2619.063514] env[62875]: DEBUG nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Start building block device mappings for instance. 
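
Network allocation runs in the background while block-device and image prep continue: the policy check for network:attach_external_network fails closed (the tempest member role is not allowed to attach external networks), so the port lands on the project network and Neutron reports it created. Roughly the same port create, done from outside Nova with openstacksdk rather than Nova's internal Neutron client (IDs taken from the entries above; the cloud name is a placeholder):

    import openstack

    conn = openstack.connect(cloud="devstack")
    port = conn.network.create_port(
        network_id="1a5817c4-58e1-487c-8090-6754f1e7cd62",  # tempest network
        device_id="61cc11b0-56bf-48ce-82e6-64c5d91d177c",   # instance UUID
        device_owner="compute:nova",
        binding_vnic_type="normal",                         # binding:vnic_type
    )
    print(port.id, port.mac_address)
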
{{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2620.072986] env[62875]: DEBUG nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2620.097128] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2620.097389] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2620.097546] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2620.097729] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2620.097875] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2620.098030] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2620.098243] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2620.098432] env[62875]: 
DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2620.098628] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2620.098796] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2620.098966] env[62875]: DEBUG nova.virt.hardware [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2620.099853] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99df52d-f3a2-40e7-9363-a8fb198ef1dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2620.107465] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d5ae2b-5ee2-4750-85c7-cb8bf68cce92 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2620.269570] env[62875]: DEBUG nova.compute.manager [req-656ee20f-0d40-4ba0-9329-840989b09000 req-99c1c0e8-6769-4c42-b72f-9eef0ff3c539 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Received event network-vif-plugged-d7159e37-51bd-4bf2-9b27-dbd2b9d29558 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2620.269626] env[62875]: DEBUG oslo_concurrency.lockutils [req-656ee20f-0d40-4ba0-9329-840989b09000 req-99c1c0e8-6769-4c42-b72f-9eef0ff3c539 service nova] Acquiring lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2620.269813] env[62875]: DEBUG oslo_concurrency.lockutils [req-656ee20f-0d40-4ba0-9329-840989b09000 req-99c1c0e8-6769-4c42-b72f-9eef0ff3c539 service nova] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2620.269988] env[62875]: DEBUG oslo_concurrency.lockutils [req-656ee20f-0d40-4ba0-9329-840989b09000 req-99c1c0e8-6769-4c42-b72f-9eef0ff3c539 service nova] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2620.270169] env[62875]: DEBUG nova.compute.manager [req-656ee20f-0d40-4ba0-9329-840989b09000 
req-99c1c0e8-6769-4c42-b72f-9eef0ff3c539 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] No waiting events found dispatching network-vif-plugged-d7159e37-51bd-4bf2-9b27-dbd2b9d29558 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2620.270333] env[62875]: WARNING nova.compute.manager [req-656ee20f-0d40-4ba0-9329-840989b09000 req-99c1c0e8-6769-4c42-b72f-9eef0ff3c539 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Received unexpected event network-vif-plugged-d7159e37-51bd-4bf2-9b27-dbd2b9d29558 for instance with vm_state building and task_state spawning. [ 2620.358676] env[62875]: DEBUG nova.network.neutron [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Successfully updated port: d7159e37-51bd-4bf2-9b27-dbd2b9d29558 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2620.861341] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2620.861512] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2620.861626] env[62875]: DEBUG nova.network.neutron [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2621.392796] env[62875]: DEBUG nova.network.neutron [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Instance cache missing network info. 
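
The CPU topology negotiation traced just before the port events is a brute-force search: with no flavor or image constraints (all the limits above are 0, so the 65536 defaults apply), every (sockets, cores, threads) triple whose product equals the vCPU count is a candidate, and for the 1-vCPU m1.nano flavor that collapses to the single 1:1:1 result the log reports. The same enumeration in a few lines (a sketch, not Nova's exact preference-ordering logic):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        bound = lambda m: range(1, min(vcpus, m) + 1)
        for s, c, t in itertools.product(bound(max_sockets),
                                         bound(max_cores),
                                         bound(max_threads)):
            if s * c * t == vcpus:      # triple must use every vCPU exactly
                yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the log
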
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2621.509060] env[62875]: DEBUG nova.network.neutron [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Updating instance_info_cache with network_info: [{"id": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "address": "fa:16:3e:14:1a:75", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7159e37-51", "ovs_interfaceid": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2622.011899] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2622.012420] env[62875]: DEBUG nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Instance network_info: |[{"id": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "address": "fa:16:3e:14:1a:75", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7159e37-51", "ovs_interfaceid": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2622.013072] env[62875]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:1a:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd7159e37-51bd-4bf2-9b27-dbd2b9d29558', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2622.022843] env[62875]: DEBUG oslo.service.loopingcall [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2622.023054] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2622.023281] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24b0cd57-2e1c-4689-8bd4-ea8c515a2dd4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.042792] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2622.042792] env[62875]: value = "task-2180738" [ 2622.042792] env[62875]: _type = "Task" [ 2622.042792] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2622.052081] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2622.293268] env[62875]: DEBUG nova.compute.manager [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Received event network-changed-d7159e37-51bd-4bf2-9b27-dbd2b9d29558 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2622.293358] env[62875]: DEBUG nova.compute.manager [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Refreshing instance network info cache due to event network-changed-d7159e37-51bd-4bf2-9b27-dbd2b9d29558. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2622.293622] env[62875]: DEBUG oslo_concurrency.lockutils [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] Acquiring lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2622.293770] env[62875]: DEBUG oslo_concurrency.lockutils [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] Acquired lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2622.293934] env[62875]: DEBUG nova.network.neutron [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Refreshing network info cache for port d7159e37-51bd-4bf2-9b27-dbd2b9d29558 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2622.552691] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2622.966945] env[62875]: DEBUG nova.network.neutron [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Updated VIF entry in instance network info cache for port d7159e37-51bd-4bf2-9b27-dbd2b9d29558. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2622.967321] env[62875]: DEBUG nova.network.neutron [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Updating instance_info_cache with network_info: [{"id": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "address": "fa:16:3e:14:1a:75", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7159e37-51", "ovs_interfaceid": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2623.053000] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. 
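
The network-changed event handler re-reads the port from Neutron and rewrites the instance_info_cache row shown above; the cached value is plain JSON, one VIF dict per port. Pulling the fixed IPs back out of a deserialized cache entry (`network_info` here is the already-parsed list):

    def fixed_ips(network_info):
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip["type"] == "fixed":
                        yield vif["id"], ip["address"]

    # For the entry above this yields:
    # ('d7159e37-51bd-4bf2-9b27-dbd2b9d29558', '192.168.128.8')
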
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2623.470173] env[62875]: DEBUG oslo_concurrency.lockutils [req-48f4ddec-1e61-4c1d-8c6a-b053e8b0f738 req-3f59b529-282f-4e3d-b817-953bed1a3ab9 service nova] Releasing lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2623.554163] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2624.054286] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2624.555513] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2625.056212] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2625.556806] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2626.058494] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2626.558664] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2627.059382] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2627.562644] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2628.062473] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 25%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2628.563309] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2629.064182] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2629.564877] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2630.066872] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180738, 'name': CreateVM_Task, 'duration_secs': 7.559516} completed successfully. 
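
CreateVM_Task is the slow step here: it is polled at roughly half-second intervals for about eight seconds before completing in 7.56 s of task time. The wait loop behind all of these "progress is N%" entries reduces to polling the task object until it leaves the running states (a simplified sketch against a pyVmomi task handle; oslo.vmware's real loop adds fault translation and configurable intervals):

    import time

    def wait_for_task(task, interval=0.5):
        while True:
            info = task.info                      # re-fetched on each access
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error.localizedMessage)
            time.sleep(interval)                  # queued/running: poll again
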
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2630.067137] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2630.074523] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2630.074724] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2630.075084] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2630.075353] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-398c0656-14d9-4297-abad-9eab7a40e691 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2630.080611] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2630.080611] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52580ef9-f651-3df3-7eb5-da881e0e6442" [ 2630.080611] env[62875]: _type = "Task" [ 2630.080611] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2630.089009] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52580ef9-f651-3df3-7eb5-da881e0e6442, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2630.590923] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52580ef9-f651-3df3-7eb5-da881e0e6442, 'name': SearchDatastore_Task, 'duration_secs': 0.013456} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2630.591242] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2630.591242] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2630.591447] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2630.591625] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2630.591822] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2630.592164] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63020590-828c-4c3d-a6cd-37194690d456 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2630.599978] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2630.600164] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2630.600828] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f9b70b6-3691-4371-be6b-21f53da13116 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2630.605381] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2630.605381] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528bcbe9-6e61-1952-577a-c9b618bdfdd6" [ 2630.605381] env[62875]: _type = "Task" [ 2630.605381] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2630.612649] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528bcbe9-6e61-1952-577a-c9b618bdfdd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2631.115878] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528bcbe9-6e61-1952-577a-c9b618bdfdd6, 'name': SearchDatastore_Task, 'duration_secs': 0.010243} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2631.116675] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34b7182c-584e-4bdc-9388-fcde7ca24872 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2631.122018] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2631.122018] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524657e7-b6c3-fa3f-7370-6efd720546b1" [ 2631.122018] env[62875]: _type = "Task" [ 2631.122018] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2631.129213] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524657e7-b6c3-fa3f-7370-6efd720546b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2631.633108] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524657e7-b6c3-fa3f-7370-6efd720546b1, 'name': SearchDatastore_Task, 'duration_secs': 0.009335} completed successfully. 
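
The repeated SearchDatastore_Task calls above are the image-cache probe: confirm the cached VMDK for image a9637bcc-4de8-4ea1-be59-4c697becf2a7 is present in devstack-image-cache_base before deciding whether to fetch it, all under per-image locks so concurrent spawns do not download the same disk twice, hence the Acquiring/Acquired/Releasing pairs. The shape of that check-then-fetch (a sketch; `search` and `fetch` stand in for the datastore-browser round-trip and the Glance download):

    from oslo_concurrency import lockutils

    def ensure_cached_image(image_id, search, fetch):
        path = ("[datastore1] devstack-image-cache_base/%s/%s.vmdk"
                % (image_id, image_id))
        with lockutils.lock(path):          # serialize spawns on this image
            if not search(path):            # SearchDatastore_Task found nothing
                fetch(image_id, path)       # cache miss: pull the image in
        return path
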
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2631.633473] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2631.633625] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/61cc11b0-56bf-48ce-82e6-64c5d91d177c.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2631.633881] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6492a6c7-d68e-49c0-8924-c7dd043a6df9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2631.641440] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2631.641440] env[62875]: value = "task-2180739" [ 2631.641440] env[62875]: _type = "Task" [ 2631.641440] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2631.649012] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2631.706755] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2631.706966] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2632.150634] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180739, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455368} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2632.150892] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/61cc11b0-56bf-48ce-82e6-64c5d91d177c.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2632.151112] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2632.151358] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7c1fe51-a819-4a13-bf06-27a90a76ea33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.158164] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2632.158164] env[62875]: value = "task-2180740" [ 2632.158164] env[62875]: _type = "Task" [ 2632.158164] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2632.165998] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180740, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2632.669523] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058561} completed successfully. 
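
The cached sparse VMDK is copied into the instance directory and then grown to the flavor's root size; the odd-looking 1048576 is just root_gb expressed in KiB, the unit vSphere disk capacities are sized in:

    def root_gb_to_kb(root_gb):
        # 1 GiB -> 1024 * 1024 KiB
        return root_gb * 1024 * 1024

    assert root_gb_to_kb(1) == 1048576    # m1.nano's root_gb=1, as logged
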
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2632.669867] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2632.670508] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c23130-f7dd-4696-9589-fe7ec66e4c6b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.691791] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/61cc11b0-56bf-48ce-82e6-64c5d91d177c.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2632.692030] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edb10782-ae30-46ae-9d26-0e7f98e5866b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.711685] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2632.711685] env[62875]: value = "task-2180741" [ 2632.711685] env[62875]: _type = "Task" [ 2632.711685] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2632.718940] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180741, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2633.221990] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180741, 'name': ReconfigVM_Task, 'duration_secs': 0.283432} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2633.222291] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/61cc11b0-56bf-48ce-82e6-64c5d91d177c.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2633.222914] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d361dc7d-1506-4fa3-85dd-f3da00b064b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.229892] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2633.229892] env[62875]: value = "task-2180742" [ 2633.229892] env[62875]: _type = "Task" [ 2633.229892] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2633.237433] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180742, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2633.739531] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180742, 'name': Rename_Task, 'duration_secs': 0.141419} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2633.739881] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2633.740040] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97e1e960-8d20-4177-ab36-4792a5f4382a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.746594] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2633.746594] env[62875]: value = "task-2180743" [ 2633.746594] env[62875]: _type = "Task" [ 2633.746594] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2633.753611] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180743, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2634.256480] env[62875]: DEBUG oslo_vmware.api [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180743, 'name': PowerOnVM_Task, 'duration_secs': 0.441989} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2634.256776] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2634.256982] env[62875]: INFO nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Took 14.18 seconds to spawn the instance on the hypervisor. [ 2634.257173] env[62875]: DEBUG nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2634.257999] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9f50ea-9c47-48d5-9f90-b2ce0841abb4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2634.706794] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2634.707038] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2634.775365] env[62875]: INFO nova.compute.manager [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Took 19.63 seconds to build instance. 
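
End to end: 19.63 s to build, of which 14.18 s is the hypervisor spawn, and within that only about 8.9 s is actual vCenter task time; the rest is poll latency and API round-trips. Summing the task durations logged for this instance:

    durations = {                       # seconds, from the entries above
        "CreateVM_Task":          7.559516,
        "CopyVirtualDisk_Task":   0.455368,
        "ExtendVirtualDisk_Task": 0.058561,
        "ReconfigVM_Task":        0.283432,
        "Rename_Task":            0.141419,
        "PowerOnVM_Task":         0.441989,
    }
    print(round(sum(durations.values()), 2))   # 8.94
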
[ 2634.956769] env[62875]: INFO nova.compute.manager [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Rescuing [ 2634.957096] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2634.957306] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2634.957412] env[62875]: DEBUG nova.network.neutron [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2635.277527] env[62875]: DEBUG oslo_concurrency.lockutils [None req-1f028d04-bf5f-44fe-a967-a2147f907b9a tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.135s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2635.699710] env[62875]: DEBUG nova.network.neutron [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Updating instance_info_cache with network_info: [{"id": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "address": "fa:16:3e:14:1a:75", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7159e37-51", "ovs_interfaceid": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2636.202849] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 
tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2636.702637] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2636.706272] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2637.491908] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2637.492176] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2637.737927] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2637.738272] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82379bb7-7b64-4329-96f0-537e9b36968f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2637.746093] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2637.746093] env[62875]: value = "task-2180744" [ 2637.746093] env[62875]: _type = "Task" [ 2637.746093] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2637.756570] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180744, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2637.771839] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2637.772122] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2637.995740] env[62875]: DEBUG nova.compute.utils [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2638.256591] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180744, 'name': PowerOffVM_Task, 'duration_secs': 0.284197} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2638.256832] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2638.257614] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd45143f-25d1-4ab7-8bf1-ea4e3b3a1fc7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2638.275386] env[62875]: DEBUG nova.compute.utils [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2638.277124] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a126c3-d8ff-47a6-b084-0f2d8139f863 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2638.303600] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2638.304100] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9c85042-66bd-459c-84eb-ecf545b69122 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2638.311400] 
env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2638.311400] env[62875]: value = "task-2180745" [ 2638.311400] env[62875]: _type = "Task" [ 2638.311400] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2638.320087] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180745, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2638.498920] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2638.781683] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2638.821693] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] VM already powered off {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2638.821898] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2638.822160] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2638.822314] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2638.822493] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 
tempest-ServerRescueTestJSON-1904670578-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2638.822728] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f53a570-7e54-48ca-a137-b9bcb9616ab1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2638.831959] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2638.832145] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2638.832804] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f14b853-e3c9-446b-8c91-5773ac4c6c1a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2638.837502] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2638.837502] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52daef2a-fc0b-de09-59fc-5526a1363d02" [ 2638.837502] env[62875]: _type = "Task" [ 2638.837502] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2638.844666] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52daef2a-fc0b-de09-59fc-5526a1363d02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2639.348468] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52daef2a-fc0b-de09-59fc-5526a1363d02, 'name': SearchDatastore_Task, 'duration_secs': 0.008513} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2639.349317] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9056c04d-0830-468b-9e33-9e2d88c522fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2639.354427] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2639.354427] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528e8042-eea6-d9d4-1673-781b129d44ec" [ 2639.354427] env[62875]: _type = "Task" [ 2639.354427] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2639.361889] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528e8042-eea6-d9d4-1673-781b129d44ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2639.664295] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2639.664689] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2639.664734] env[62875]: INFO nova.compute.manager [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Attaching volume 25db3a6f-8928-4268-9345-ca7643be969e to /dev/sdb [ 2639.694301] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90236334-1f6d-49cd-9ca7-8f90029e9259 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2639.701360] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d475b8a1-34bb-4642-97c5-4d5ff93a5a12 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2639.714362] env[62875]: DEBUG nova.virt.block_device [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updating existing volume attachment record: 67834d3b-3183-4ba5-9f78-c20dae71cfee {{(pid=62875) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2639.864074] env[62875]: 
DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528e8042-eea6-d9d4-1673-781b129d44ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009539} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2639.864354] env[62875]: DEBUG oslo_concurrency.lockutils [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2639.864609] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk. {{(pid=62875) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2639.864869] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9f7adb3-96a6-48f6-bedf-601624317aa2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2639.870803] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2639.870803] env[62875]: value = "task-2180747" [ 2639.870803] env[62875]: _type = "Task" [ 2639.870803] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2639.877924] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180747, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2639.882552] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2639.882774] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2639.883050] env[62875]: INFO nova.compute.manager [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Attaching volume ca3fe324-791c-4b16-9833-fec8cd21a927 to /dev/sdb [ 2639.912526] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b415f393-6868-4f7d-8ead-5c7c26bd2d3a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2639.919582] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1db9c9b-1c22-48a6-bdb1-7a08cf79b7a8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2639.932267] env[62875]: DEBUG nova.virt.block_device [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updating existing volume attachment record: 6da605fb-f665-4180-9579-8f78cafc9b17 {{(pid=62875) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2640.382725] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462215} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2640.383015] env[62875]: INFO nova.virt.vmwareapi.ds_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk. 
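[annotation] The recurring "Waiting for the task: ... progress is N% ... completed successfully" records above and below are produced by oslo.vmware's task-polling loop (wait_for_task / _poll_task in oslo_vmware/api.py): the driver submits a vCenter task (PowerOffVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task, ...) and then polls its info until a terminal state. A minimal self-contained sketch of that pattern, in which get_task_info, TaskInfo and the state names are illustrative stand-ins rather than the actual oslo.vmware internals:

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        key: str            # e.g. "task-2180747"
        state: str          # 'queued' | 'running' | 'success' | 'error'
        progress: int       # 0-100
        result: object = None
        error: str = ''

    def wait_for_task(get_task_info, interval=0.5):
        # Poll a vCenter task until it reaches a terminal state, logging
        # progress on each pass -- the analogue of the "_poll_task ...
        # progress is N%" DEBUG records in this log.
        while True:
            info = get_task_info()
            if info.state == 'success':
                print("Task: {'id': %s} completed successfully." % info.key)
                return info.result
            if info.state == 'error':
                raise RuntimeError("Task %s failed: %s" % (info.key, info.error))
            print("Task: {'id': %s} progress is %d%%." % (info.key, info.progress))
            time.sleep(interval)

    # Toy usage: a task that "finishes" after two polls.
    states = iter([TaskInfo('task-2180747', 'running', 0),
                   TaskInfo('task-2180747', 'running', 50),
                   TaskInfo('task-2180747', 'success', 100, result='ok')])
    wait_for_task(lambda: next(states), interval=0.0)

The duration_secs values reported on completion (e.g. 0.462215 for the rescue-disk copy above) are measured across exactly this loop, so they include polling latency, not just vCenter-side execution time.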
[ 2640.383794] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c9c49e-a809-4c06-8bf9-b77c529c6263 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2640.409282] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2640.409591] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-817eb2b6-0ad0-4806-be4f-b9a423adab38 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2640.428632] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2640.428632] env[62875]: value = "task-2180753" [ 2640.428632] env[62875]: _type = "Task" [ 2640.428632] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2640.436227] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180753, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2640.706220] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2640.938983] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180753, 'name': ReconfigVM_Task, 'duration_secs': 0.266135} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2640.939434] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c/a9637bcc-4de8-4ea1-be59-4c697becf2a7-rescue.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2640.940768] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a6e184-5acf-4bb7-b754-03c46e9726ab {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2640.965750] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a8176ab-9c71-4d55-b334-6d6c84ee7bba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2640.981410] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2640.981410] env[62875]: value = "task-2180754" [ 2640.981410] env[62875]: _type = "Task" [ 2640.981410] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2640.989196] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180754, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2641.490731] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180754, 'name': ReconfigVM_Task, 'duration_secs': 0.170477} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2641.490983] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2641.491237] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b31d4250-a357-43b2-be99-6dbdab087c20 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2641.497323] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2641.497323] env[62875]: value = "task-2180755" [ 2641.497323] env[62875]: _type = "Task" [ 2641.497323] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2641.504809] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180755, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2641.706791] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2641.707175] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2641.707249] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 2642.007456] env[62875]: DEBUG oslo_vmware.api [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180755, 'name': PowerOnVM_Task, 'duration_secs': 0.389943} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2642.007654] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2642.010302] env[62875]: DEBUG nova.compute.manager [None req-3c55aab4-6104-4d1d-a980-b02473002f40 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2642.011110] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9bcea2-72f9-4807-aab0-5d5caaba05d0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2642.211959] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 6 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 2642.212142] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: f1207e40-9d37-4439-a684-fa30c26d088a] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2642.716041] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: b71c432b-80ec-4b08-a62a-b1d5ccc56f86] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2643.181060] env[62875]: INFO nova.compute.manager [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c 
tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Unrescuing [ 2643.181387] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2643.181548] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquired lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2643.181718] env[62875]: DEBUG nova.network.neutron [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2643.218964] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 760f10ab-4617-418b-b922-4f9835eb96f4] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2643.721421] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 3eef1182-4159-4d57-8e6d-c5a1a50315f4] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2643.954476] env[62875]: DEBUG nova.network.neutron [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Updating instance_info_cache with network_info: [{"id": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "address": "fa:16:3e:14:1a:75", "network": {"id": "1a5817c4-58e1-487c-8090-6754f1e7cd62", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1273216402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "120372b8f4fc452fb0cdf362fbf8431a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7159e37-51", "ovs_interfaceid": "d7159e37-51bd-4bf2-9b27-dbd2b9d29558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2644.224225] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 67ac6916-04f3-4eb8-b7da-37a5b28b50d9] Instance 
has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2644.457752] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Releasing lock "refresh_cache-61cc11b0-56bf-48ce-82e6-64c5d91d177c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2644.458489] env[62875]: DEBUG nova.objects.instance [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lazy-loading 'flavor' on Instance uuid 61cc11b0-56bf-48ce-82e6-64c5d91d177c {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2644.479953] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Volume attach. Driver type: vmdk {{(pid=62875) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2644.480211] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445027', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'name': 'volume-ca3fe324-791c-4b16-9833-fec8cd21a927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'serial': 'ca3fe324-791c-4b16-9833-fec8cd21a927'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2644.481227] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ce897d-b183-482f-bfa9-2d24ebc1d9b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.498521] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4fa5d2-7bfa-4e5c-ba1c-bdb6247d962a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.524397] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] volume-ca3fe324-791c-4b16-9833-fec8cd21a927/volume-ca3fe324-791c-4b16-9833-fec8cd21a927.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2644.524710] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-322678e4-8017-4b17-95ca-97fcf537dd9e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.543043] env[62875]: DEBUG oslo_vmware.api [None req-e996bdab-ca98-46f8-9ede-939d041e490c 
tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2644.543043] env[62875]: value = "task-2180758" [ 2644.543043] env[62875]: _type = "Task" [ 2644.543043] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2644.551120] env[62875]: DEBUG oslo_vmware.api [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180758, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2644.727580] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 7969485a-ccd6-48e0-bdea-b8920af28843] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2644.757917] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Volume attach. Driver type: vmdk {{(pid=62875) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2644.758180] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445028', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'name': 'volume-25db3a6f-8928-4268-9345-ca7643be969e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ea0366f-3383-4da0-abf4-c8cbfa199809', 'attached_at': '', 'detached_at': '', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'serial': '25db3a6f-8928-4268-9345-ca7643be969e'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2644.759244] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c830a132-be7f-4098-bec5-8d2df38dd8cc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.779384] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf0d116-f0ba-44ea-b594-00de1339580a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.804756] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-25db3a6f-8928-4268-9345-ca7643be969e/volume-25db3a6f-8928-4268-9345-ca7643be969e.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2644.805111] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ea6f350-299b-439b-96e4-8b216fdc70ff {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.824760] env[62875]: DEBUG oslo_vmware.api [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2644.824760] env[62875]: value = "task-2180759" [ 2644.824760] env[62875]: _type = "Task" [ 2644.824760] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2644.833236] env[62875]: DEBUG oslo_vmware.api [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180759, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2644.964741] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd0ce80-6c90-4288-b1ff-a4f679c557cd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.986294] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2644.986603] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5011c969-9720-4beb-9915-2b8f0264aa07 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2644.992664] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2644.992664] env[62875]: value = "task-2180760" [ 2644.992664] env[62875]: _type = "Task" [ 2644.992664] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2645.000191] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2645.052427] env[62875]: DEBUG oslo_vmware.api [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180758, 'name': ReconfigVM_Task, 'duration_secs': 0.37372} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2645.052693] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfigured VM instance instance-00000066 to attach disk [datastore2] volume-ca3fe324-791c-4b16-9833-fec8cd21a927/volume-ca3fe324-791c-4b16-9833-fec8cd21a927.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2645.057402] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c38ea41f-2618-408f-bb7e-17dd3c407ed2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.072265] env[62875]: DEBUG oslo_vmware.api [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2645.072265] env[62875]: value = "task-2180761" [ 2645.072265] env[62875]: _type = "Task" [ 2645.072265] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2645.080609] env[62875]: DEBUG oslo_vmware.api [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180761, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2645.334787] env[62875]: DEBUG oslo_vmware.api [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180759, 'name': ReconfigVM_Task, 'duration_secs': 0.352202} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2645.335017] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-25db3a6f-8928-4268-9345-ca7643be969e/volume-25db3a6f-8928-4268-9345-ca7643be969e.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2645.339673] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ced047a3-01f6-4e74-9178-12c84755e07e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.354608] env[62875]: DEBUG oslo_vmware.api [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2645.354608] env[62875]: value = "task-2180762" [ 2645.354608] env[62875]: _type = "Task" [ 2645.354608] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2645.363638] env[62875]: DEBUG oslo_vmware.api [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180762, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2645.502425] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180760, 'name': PowerOffVM_Task, 'duration_secs': 0.22468} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2645.502692] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2645.507781] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2645.508049] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e58edc1d-0a4b-4a6c-b8da-71fd8a5132a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.525321] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2645.525321] env[62875]: value = "task-2180763" [ 2645.525321] env[62875]: _type = "Task" [ 2645.525321] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2645.532575] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180763, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2645.582231] env[62875]: DEBUG oslo_vmware.api [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180761, 'name': ReconfigVM_Task, 'duration_secs': 0.138253} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2645.582557] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445027', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'name': 'volume-ca3fe324-791c-4b16-9833-fec8cd21a927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'serial': 'ca3fe324-791c-4b16-9833-fec8cd21a927'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2645.864122] env[62875]: DEBUG oslo_vmware.api [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180762, 'name': ReconfigVM_Task, 'duration_secs': 0.138004} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2645.864504] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445028', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'name': 'volume-25db3a6f-8928-4268-9345-ca7643be969e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ea0366f-3383-4da0-abf4-c8cbfa199809', 'attached_at': '', 'detached_at': '', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'serial': '25db3a6f-8928-4268-9345-ca7643be969e'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2646.034934] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180763, 'name': ReconfigVM_Task, 'duration_secs': 0.208413} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2646.035234] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2646.035417] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2646.035662] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3034fa7-3a0b-4c7e-aacd-40a32a7d662a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2646.041396] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2646.041396] env[62875]: value = "task-2180764" [ 2646.041396] env[62875]: _type = "Task" [ 2646.041396] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2646.048829] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180764, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2646.551480] env[62875]: DEBUG oslo_vmware.api [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180764, 'name': PowerOnVM_Task, 'duration_secs': 0.332658} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2646.551751] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2646.551974] env[62875]: DEBUG nova.compute.manager [None req-9dc019ec-515b-44de-bcb5-6a9f000e649c tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2646.552751] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c06cdb-89a8-4aa8-a5b4-d47f81349e1c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2646.621303] env[62875]: DEBUG nova.objects.instance [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lazy-loading 'flavor' on Instance uuid 467b53e0-1614-4624-841d-1310271825bc {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2646.901334] env[62875]: DEBUG nova.objects.instance [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lazy-loading 'flavor' on Instance uuid 0ea0366f-3383-4da0-abf4-c8cbfa199809 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2647.126286] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e996bdab-ca98-46f8-9ede-939d041e490c tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.243s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2647.279918] env[62875]: DEBUG oslo_concurrency.lockutils [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2647.405557] env[62875]: DEBUG oslo_concurrency.lockutils [None req-f41f46b5-b382-4ea0-9fa2-291fa558205c tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.741s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2647.406411] env[62875]: DEBUG oslo_concurrency.lockutils [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.127s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2647.847053] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2647.847350] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2647.847559] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2647.847776] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2647.847970] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2647.850017] env[62875]: INFO nova.compute.manager [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Terminating instance [ 2647.891297] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2647.891526] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2647.909321] env[62875]: INFO nova.compute.manager [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Detaching volume 25db3a6f-8928-4268-9345-ca7643be969e [ 2647.938355] env[62875]: INFO nova.virt.block_device [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Attempting to driver detach volume 25db3a6f-8928-4268-9345-ca7643be969e from mountpoint /dev/sdb [ 2647.938577] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Volume detach. Driver type: vmdk {{(pid=62875) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2647.938762] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445028', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'name': 'volume-25db3a6f-8928-4268-9345-ca7643be969e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ea0366f-3383-4da0-abf4-c8cbfa199809', 'attached_at': '', 'detached_at': '', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'serial': '25db3a6f-8928-4268-9345-ca7643be969e'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2647.939634] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8707397-2685-4c40-a70f-8d1bd1f4f741 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2647.960658] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd7be57-e328-48a1-bb06-22ce4128caeb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2647.967794] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4d0c7c-d660-4e37-9aed-341fc6b663c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2647.987912] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a2152b-d061-41d9-b7ac-7b71d0e49f6e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.003412] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] The volume has not been displaced from its original location: [datastore2] volume-25db3a6f-8928-4268-9345-ca7643be969e/volume-25db3a6f-8928-4268-9345-ca7643be969e.vmdk. No consolidation needed. 
{{(pid=62875) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2648.008636] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2648.008890] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6430bd37-5cba-4f78-9b15-f0dd39832aba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.026169] env[62875]: DEBUG oslo_vmware.api [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2648.026169] env[62875]: value = "task-2180765" [ 2648.026169] env[62875]: _type = "Task" [ 2648.026169] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2648.033406] env[62875]: DEBUG oslo_vmware.api [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180765, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2648.353837] env[62875]: DEBUG nova.compute.manager [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2648.354099] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2648.355047] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba110745-1e53-429b-bcb7-44851371b423 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.363157] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2648.363482] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f073e9a-a3b3-43a3-9afa-cef1b1a83035 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.370195] env[62875]: DEBUG oslo_vmware.api [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2648.370195] env[62875]: value = "task-2180766" [ 2648.370195] env[62875]: _type = "Task" [ 2648.370195] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2648.378411] env[62875]: DEBUG oslo_vmware.api [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2648.394621] env[62875]: DEBUG nova.compute.utils [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2648.535543] env[62875]: DEBUG oslo_vmware.api [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180765, 'name': ReconfigVM_Task, 'duration_secs': 0.210544} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2648.535810] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2648.540652] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1797d06-78ea-4dd3-afc8-e9a06d5af947 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.554920] env[62875]: DEBUG oslo_vmware.api [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2648.554920] env[62875]: value = "task-2180767" [ 2648.554920] env[62875]: _type = "Task" [ 2648.554920] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2648.562477] env[62875]: DEBUG oslo_vmware.api [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180767, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2648.879690] env[62875]: DEBUG oslo_vmware.api [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180766, 'name': PowerOffVM_Task, 'duration_secs': 0.173467} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2648.879951] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2648.880142] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2648.880383] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18fa2e2f-573d-4b89-9c9e-b546d0e1c090 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.897516] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2649.065048] env[62875]: DEBUG oslo_vmware.api [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180767, 'name': ReconfigVM_Task, 'duration_secs': 0.128446} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2649.065405] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445028', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'name': 'volume-25db3a6f-8928-4268-9345-ca7643be969e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ea0366f-3383-4da0-abf4-c8cbfa199809', 'attached_at': '', 'detached_at': '', 'volume_id': '25db3a6f-8928-4268-9345-ca7643be969e', 'serial': '25db3a6f-8928-4268-9345-ca7643be969e'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2649.315321] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2649.315546] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2649.315719] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Deleting the datastore file [datastore1] 61cc11b0-56bf-48ce-82e6-64c5d91d177c {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2649.315992] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b28039fe-a01a-4536-b791-7ce535cb7f7b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2649.322946] env[62875]: DEBUG oslo_vmware.api [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2649.322946] env[62875]: value = "task-2180769" [ 2649.322946] env[62875]: _type = "Task" [ 2649.322946] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2649.332025] env[62875]: DEBUG oslo_vmware.api [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180769, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2649.604811] env[62875]: DEBUG nova.objects.instance [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lazy-loading 'flavor' on Instance uuid 0ea0366f-3383-4da0-abf4-c8cbfa199809 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2649.832887] env[62875]: DEBUG oslo_vmware.api [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140258} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2649.833222] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2649.833426] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2649.833605] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2649.833779] env[62875]: INFO nova.compute.manager [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Took 1.48 seconds to destroy the instance on the hypervisor. [ 2649.834029] env[62875]: DEBUG oslo.service.loopingcall [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2649.834221] env[62875]: DEBUG nova.compute.manager [-] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2649.834317] env[62875]: DEBUG nova.network.neutron [-] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2649.950297] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2649.950563] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2649.950801] env[62875]: INFO nova.compute.manager [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Attaching volume 3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3 to /dev/sdc [ 2649.984736] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b708b73b-f674-4187-80fe-a10b27e141b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2649.993600] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe08c416-1478-42b7-adaf-460278dcabef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.006464] env[62875]: DEBUG nova.virt.block_device [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updating existing volume attachment record: e84df64d-34da-49a7-85d8-ce0ccceccb44 {{(pid=62875) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2650.076658] env[62875]: DEBUG nova.compute.manager [req-04f0acb3-c626-4633-8ec6-7221bf04616c req-d2b011c3-fb82-4781-a790-451e9d0b06c4 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Received event network-vif-deleted-d7159e37-51bd-4bf2-9b27-dbd2b9d29558 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2650.076913] env[62875]: INFO nova.compute.manager [req-04f0acb3-c626-4633-8ec6-7221bf04616c req-d2b011c3-fb82-4781-a790-451e9d0b06c4 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Neutron deleted interface d7159e37-51bd-4bf2-9b27-dbd2b9d29558; detaching it from the instance and deleting it from the info cache [ 2650.077065] env[62875]: DEBUG nova.network.neutron [req-04f0acb3-c626-4633-8ec6-7221bf04616c req-d2b011c3-fb82-4781-a790-451e9d0b06c4 service nova] [instance: 
61cc11b0-56bf-48ce-82e6-64c5d91d177c] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2650.556344] env[62875]: DEBUG nova.network.neutron [-] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2650.579048] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0eecb6f5-a803-428c-8bce-6c6baf075782 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.588344] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7dfa41-4d74-4814-9b70-5863d25dcdf0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.613759] env[62875]: DEBUG oslo_concurrency.lockutils [None req-102ff002-8d63-43ba-810b-532a3467f033 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.207s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2650.614799] env[62875]: DEBUG nova.compute.manager [req-04f0acb3-c626-4633-8ec6-7221bf04616c req-d2b011c3-fb82-4781-a790-451e9d0b06c4 service nova] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Detach interface failed, port_id=d7159e37-51bd-4bf2-9b27-dbd2b9d29558, reason: Instance 61cc11b0-56bf-48ce-82e6-64c5d91d177c could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2651.060453] env[62875]: INFO nova.compute.manager [-] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Took 1.22 seconds to deallocate network for instance. 
[ 2651.230554] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2651.230893] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2651.522644] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2651.522941] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2651.523192] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "0ea0366f-3383-4da0-abf4-c8cbfa199809-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2651.524917] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2651.525129] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2651.527159] env[62875]: INFO nova.compute.manager [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Terminating instance [ 2651.565797] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2651.566087] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2651.566309] env[62875]: DEBUG nova.objects.instance [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lazy-loading 'resources' on Instance uuid 61cc11b0-56bf-48ce-82e6-64c5d91d177c {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2651.769653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2651.769820] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2651.769969] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2652.031804] env[62875]: DEBUG nova.compute.manager [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2652.032035] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2652.032914] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1edc747-3256-4cdc-8fd7-6868859380dc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.041408] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2652.041641] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d451636a-e01b-4265-b814-40ec75007b52 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.048172] env[62875]: DEBUG oslo_vmware.api [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2652.048172] env[62875]: value = "task-2180771" [ 2652.048172] env[62875]: _type = "Task" [ 2652.048172] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2652.055852] env[62875]: DEBUG oslo_vmware.api [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180771, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2652.158946] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a614c779-52a2-4f9d-b189-548c2fb38acb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.166686] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d3b2fd-b7b4-47ae-965b-d017e24f7f1c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.198239] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2468b84-b702-4e4d-8552-5fdd4484189d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.206082] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0355824-b71e-4c5c-9454-44e0056b7704 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.221312] env[62875]: DEBUG nova.compute.provider_tree [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2652.557728] env[62875]: DEBUG oslo_vmware.api [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180771, 'name': PowerOffVM_Task, 'duration_secs': 0.192395} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2652.558048] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2652.558218] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2652.558426] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a69d3b2-6936-4c8d-a17a-6815e2837779 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.679741] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2652.679980] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2652.680162] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleting the datastore file [datastore2] 0ea0366f-3383-4da0-abf4-c8cbfa199809 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2652.680427] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a4b9557-cd87-4184-9302-50a68edadc41 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.686567] env[62875]: DEBUG oslo_vmware.api [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for the task: (returnval){ [ 2652.686567] env[62875]: value = "task-2180774" [ 2652.686567] env[62875]: _type = "Task" [ 2652.686567] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2652.693925] env[62875]: DEBUG oslo_vmware.api [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180774, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2652.724930] env[62875]: DEBUG nova.scheduler.client.report [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2652.970335] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updating instance_info_cache with network_info: [{"id": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "address": "fa:16:3e:85:f7:a8", "network": {"id": "2f5b9eee-275c-4337-ac75-9cc47d62c954", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-864018650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95d0f81815ea467cbc1c6160e27409fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ca62ab4-73", "ovs_interfaceid": "6ca62ab4-73e8-4b45-b05b-6e807a8a2515", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2653.196858] env[62875]: DEBUG oslo_vmware.api [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Task: {'id': task-2180774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143931} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2653.197080] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2653.197270] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2653.197445] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2653.197618] env[62875]: INFO nova.compute.manager [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2653.197881] env[62875]: DEBUG oslo.service.loopingcall [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2653.198105] env[62875]: DEBUG nova.compute.manager [-] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2653.198204] env[62875]: DEBUG nova.network.neutron [-] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2653.229930] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2653.254104] env[62875]: INFO nova.scheduler.client.report [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Deleted allocations for instance 61cc11b0-56bf-48ce-82e6-64c5d91d177c [ 2653.473531] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-a9cc9da5-b40c-492d-92a5-85e760290be9" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2653.474458] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2653.474788] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2653.669839] env[62875]: DEBUG nova.compute.manager [req-c38bf0e3-311d-47b2-bda5-ac48de397ad0 req-2900d454-6a55-41f8-833b-a9b11db9b452 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Received event network-vif-deleted-4d47d085-7a43-409d-a8ef-700e94f5ea06 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2653.670098] env[62875]: INFO nova.compute.manager [req-c38bf0e3-311d-47b2-bda5-ac48de397ad0 req-2900d454-6a55-41f8-833b-a9b11db9b452 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Neutron deleted interface 4d47d085-7a43-409d-a8ef-700e94f5ea06; detaching it from the instance and deleting it from the info cache [ 2653.670283] env[62875]: DEBUG nova.network.neutron [req-c38bf0e3-311d-47b2-bda5-ac48de397ad0 req-2900d454-6a55-41f8-833b-a9b11db9b452 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2653.761691] env[62875]: DEBUG oslo_concurrency.lockutils [None req-850cbb37-6ddf-46aa-bcbc-30408dbb5ec6 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "61cc11b0-56bf-48ce-82e6-64c5d91d177c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.914s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2653.977849] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2653.978952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2653.978952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2653.978952] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2653.979369] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4743e1db-d4a9-4ed1-930d-a986362f4a7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2653.987793] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc819974-dc3f-4087-b2da-3b6aa8951990 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.003547] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dfaf83-3491-4546-9733-f7b75ddc28d6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.010257] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b0843b-e055-43c3-81c0-31960362041c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.039311] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180279MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2654.039463] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2654.039665] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2654.151808] env[62875]: DEBUG nova.network.neutron [-] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2654.172883] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc790bbd-92bb-4ec4-bac7-4cf5f26d6f40 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.182651] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f15b69-deb8-42c4-9e60-a49c16128631 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.212032] env[62875]: DEBUG nova.compute.manager [req-c38bf0e3-311d-47b2-bda5-ac48de397ad0 req-2900d454-6a55-41f8-833b-a9b11db9b452 service nova] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Detach interface failed, port_id=4d47d085-7a43-409d-a8ef-700e94f5ea06, reason: Instance 0ea0366f-3383-4da0-abf4-c8cbfa199809 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2654.436770] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "894b9113-47ae-4b50-ae42-682be81324ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2654.436770] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "894b9113-47ae-4b50-ae42-682be81324ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2654.436770] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "894b9113-47ae-4b50-ae42-682be81324ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2654.437020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "894b9113-47ae-4b50-ae42-682be81324ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2654.437020] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "894b9113-47ae-4b50-ae42-682be81324ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2654.439625] env[62875]: INFO nova.compute.manager [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Terminating instance [ 2654.550867] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Volume attach. Driver type: vmdk {{(pid=62875) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2654.551122] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445029', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'name': 'volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'serial': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2654.552053] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6230eb77-fa33-422a-b17c-cf84bd615473 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.570585] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2374a3e1-9f51-4445-9318-d9030b5088ec {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.602019] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3/volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2654.602360] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e3db9c0-6489-4617-9224-95857bbf7a32 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.621901] env[62875]: DEBUG oslo_vmware.api [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2654.621901] env[62875]: value = "task-2180775" [ 2654.621901] env[62875]: _type = "Task" [ 2654.621901] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2654.631784] env[62875]: DEBUG oslo_vmware.api [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180775, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2654.655182] env[62875]: INFO nova.compute.manager [-] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Took 1.46 seconds to deallocate network for instance. [ 2654.946913] env[62875]: DEBUG nova.compute.manager [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2654.947327] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2654.948511] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb0a2e0-4db8-46fc-8b7c-445517eaab3c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.956203] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2654.956478] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87a3fc70-ab07-4818-8306-4134766e08c1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.962768] env[62875]: DEBUG oslo_vmware.api [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2654.962768] env[62875]: value = "task-2180776" [ 2654.962768] env[62875]: _type = "Task" [ 2654.962768] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2654.972030] env[62875]: DEBUG oslo_vmware.api [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2655.069211] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2655.069403] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a9cc9da5-b40c-492d-92a5-85e760290be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2655.069534] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 0ea0366f-3383-4da0-abf4-c8cbfa199809 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2655.069659] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 467b53e0-1614-4624-841d-1310271825bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2655.069778] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 894b9113-47ae-4b50-ae42-682be81324ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2655.069986] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2655.070139] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2655.133812] env[62875]: DEBUG oslo_vmware.api [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180775, 'name': ReconfigVM_Task, 'duration_secs': 0.422821} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2655.134105] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfigured VM instance instance-00000066 to attach disk [datastore1] volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3/volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2655.141546] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-754cd416-7a89-4b5b-ab60-aabd132cd957 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.163820] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2655.164289] env[62875]: DEBUG oslo_vmware.api [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2655.164289] env[62875]: value = "task-2180777" [ 2655.164289] env[62875]: _type = "Task" [ 2655.164289] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2655.171170] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ef61de-cbfb-43f9-9d10-5118724a5f79 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.180717] env[62875]: DEBUG oslo_vmware.api [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2655.184535] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf8d698-0f0b-4304-a311-8a581580727c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.238599] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d11a858-754e-42a9-b573-22da8a9e35e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.249927] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a607d5-6e5f-455a-8d43-3906ee11f736 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.269336] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2655.473541] env[62875]: DEBUG oslo_vmware.api [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180776, 'name': PowerOffVM_Task, 'duration_secs': 0.266298} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2655.473833] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2655.473991] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2655.474271] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b6501de-d79c-46ab-bd4b-256700e9236a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.674053] env[62875]: DEBUG oslo_vmware.api [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180777, 'name': ReconfigVM_Task, 'duration_secs': 0.15168} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2655.674371] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445029', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'name': 'volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'serial': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2655.773041] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2656.278046] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2656.278046] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.238s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2656.278374] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.115s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2656.278641] env[62875]: DEBUG nova.objects.instance [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lazy-loading 'resources' on Instance uuid 0ea0366f-3383-4da0-abf4-c8cbfa199809 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2656.710761] env[62875]: DEBUG nova.objects.instance [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lazy-loading 'flavor' on Instance uuid 467b53e0-1614-4624-841d-1310271825bc {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2656.844940] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3f685ce4-61ad-4af4-b39b-6d3de765affb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.852232] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0106612-f678-4449-a4fd-cac869a7bf9c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.884437] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2594004-818f-40be-8f62-7f9c1d0109de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.891784] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22358443-f740-4cac-beec-cd2dab0afa65 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.904467] env[62875]: DEBUG nova.compute.provider_tree [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2657.216137] env[62875]: DEBUG oslo_concurrency.lockutils [None req-5ba23aa9-2be3-47ec-b666-14dee5f669fd tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.265s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2657.875555] env[62875]: DEBUG nova.scheduler.client.report [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2657.875555] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2657.875555] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2657.912337] env[62875]: DEBUG oslo_concurrency.lockutils [None
req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.634s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2657.915884] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2657.916110] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2657.916514] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Deleting the datastore file [datastore2] 894b9113-47ae-4b50-ae42-682be81324ba {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2657.916856] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-437e80d2-d2b4-47fc-bf81-de20f200b2f6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.923497] env[62875]: DEBUG oslo_vmware.api [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for the task: (returnval){ [ 2657.923497] env[62875]: value = "task-2180779" [ 2657.923497] env[62875]: _type = "Task" [ 2657.923497] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2657.930985] env[62875]: DEBUG oslo_vmware.api [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180779, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2657.934296] env[62875]: INFO nova.scheduler.client.report [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Deleted allocations for instance 0ea0366f-3383-4da0-abf4-c8cbfa199809 [ 2658.012161] env[62875]: INFO nova.compute.manager [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Detaching volume ca3fe324-791c-4b16-9833-fec8cd21a927 [ 2658.048272] env[62875]: INFO nova.virt.block_device [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Attempting to driver detach volume ca3fe324-791c-4b16-9833-fec8cd21a927 from mountpoint /dev/sdb [ 2658.048534] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Volume detach. Driver type: vmdk {{(pid=62875) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2658.048734] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445027', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'name': 'volume-ca3fe324-791c-4b16-9833-fec8cd21a927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'serial': 'ca3fe324-791c-4b16-9833-fec8cd21a927'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2658.049648] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6d4376-534d-4e5e-bde7-2d358e42dd4f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.077448] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f852de08-a2fd-4bcd-b352-531ad7c6da43 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.084343] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb8cc3a-b292-4dc6-a360-38ae584d6dd4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.106678] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db459ab-d8b8-46cb-bc36-056530c0c9ca {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.120730] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 
tempest-AttachVolumeTestJSON-271302038-project-member] The volume has not been displaced from its original location: [datastore2] volume-ca3fe324-791c-4b16-9833-fec8cd21a927/volume-ca3fe324-791c-4b16-9833-fec8cd21a927.vmdk. No consolidation needed. {{(pid=62875) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2658.125837] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2658.126058] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09e32420-ed58-43ec-b926-40f62d147756 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.142835] env[62875]: DEBUG oslo_vmware.api [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2658.142835] env[62875]: value = "task-2180780" [ 2658.142835] env[62875]: _type = "Task" [ 2658.142835] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2658.150204] env[62875]: DEBUG oslo_vmware.api [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180780, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2658.433329] env[62875]: DEBUG oslo_vmware.api [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Task: {'id': task-2180779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404882} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2658.433544] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2658.433715] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2658.433888] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2658.434077] env[62875]: INFO nova.compute.manager [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Took 3.49 seconds to destroy the instance on the hypervisor. [ 2658.434327] env[62875]: DEBUG oslo.service.loopingcall [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2658.434515] env[62875]: DEBUG nova.compute.manager [-] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2658.434610] env[62875]: DEBUG nova.network.neutron [-] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2658.444111] env[62875]: DEBUG oslo_concurrency.lockutils [None req-ba46e4cf-b014-478a-a842-1dc480040bd1 tempest-AttachVolumeNegativeTest-1439623505 tempest-AttachVolumeNegativeTest-1439623505-project-member] Lock "0ea0366f-3383-4da0-abf4-c8cbfa199809" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.921s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.652993] env[62875]: DEBUG oslo_vmware.api [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180780, 'name': ReconfigVM_Task, 'duration_secs': 0.303478} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2658.653304] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2658.659883] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a35f2f8f-9818-4f57-9f3a-6bfbb40e21c9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.674817] env[62875]: DEBUG nova.compute.manager [req-f0f97724-2c57-408c-94d9-85543d894aeb req-7aea047a-5772-44f5-a80f-7eb1b67d71fd service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Received event network-vif-deleted-3f88b147-99aa-4953-a073-0c0f9081e907 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2658.674817] env[62875]: INFO nova.compute.manager [req-f0f97724-2c57-408c-94d9-85543d894aeb req-7aea047a-5772-44f5-a80f-7eb1b67d71fd service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Neutron deleted interface 3f88b147-99aa-4953-a073-0c0f9081e907; detaching it from the instance and deleting it from the info cache [ 2658.674969] env[62875]: DEBUG nova.network.neutron [req-f0f97724-2c57-408c-94d9-85543d894aeb req-7aea047a-5772-44f5-a80f-7eb1b67d71fd service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2658.678404] env[62875]: DEBUG oslo_vmware.api [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2658.678404] env[62875]: value = "task-2180781" [ 2658.678404] env[62875]: _type = "Task" [ 2658.678404] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2658.687687] env[62875]: DEBUG oslo_vmware.api [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180781, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2659.171387] env[62875]: DEBUG nova.network.neutron [-] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2659.179615] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd56f7a1-cd99-48e1-9574-95ee10e371a4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.191624] env[62875]: DEBUG oslo_vmware.api [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180781, 'name': ReconfigVM_Task, 'duration_secs': 0.148169} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2659.192608] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445027', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'name': 'volume-ca3fe324-791c-4b16-9833-fec8cd21a927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'ca3fe324-791c-4b16-9833-fec8cd21a927', 'serial': 'ca3fe324-791c-4b16-9833-fec8cd21a927'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2659.197371] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee37f4ce-051f-4ce8-9bc8-5051e321e9ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.222403] env[62875]: DEBUG nova.compute.manager [req-f0f97724-2c57-408c-94d9-85543d894aeb req-7aea047a-5772-44f5-a80f-7eb1b67d71fd service nova] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Detach interface failed, port_id=3f88b147-99aa-4953-a073-0c0f9081e907, reason: Instance 894b9113-47ae-4b50-ae42-682be81324ba could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2659.674120] env[62875]: INFO nova.compute.manager [-] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Took 1.24 seconds to deallocate network for instance. 
[ 2659.706918] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2659.707107] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 2659.733280] env[62875]: DEBUG nova.objects.instance [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lazy-loading 'flavor' on Instance uuid 467b53e0-1614-4624-841d-1310271825bc {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2660.180273] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2660.180555] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2660.180948] env[62875]: DEBUG nova.objects.instance [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lazy-loading 'resources' on Instance uuid 894b9113-47ae-4b50-ae42-682be81324ba {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2660.739067] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f540642-8309-4205-817f-56db09331286 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.230s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2660.746990] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99375996-3efe-4ec8-96a8-cf305673899c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.754260] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2660.754499] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by 
"nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2660.756500] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a3154a-0872-49fb-a6b9-e433407bab98 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.792317] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c41782-2076-4a05-8da7-7b5b90937214 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.800036] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7f0ad7-7753-42fd-aa6c-d9fb0399e926 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.814427] env[62875]: DEBUG nova.compute.provider_tree [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2661.260792] env[62875]: INFO nova.compute.manager [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Detaching volume 3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3 [ 2661.292442] env[62875]: INFO nova.virt.block_device [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Attempting to driver detach volume 3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3 from mountpoint /dev/sdc [ 2661.292681] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Volume detach. 
Driver type: vmdk {{(pid=62875) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2661.292872] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445029', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'name': 'volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'serial': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2661.293753] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ae3b1a-508a-45f8-853f-f8a800da2f66 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.314870] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac9dde8-b0c5-4515-9fa6-2b0bfa6ca492 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.318063] env[62875]: DEBUG nova.scheduler.client.report [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2661.325014] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9688ae97-af12-4a82-a446-0252f90c6bbe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.344993] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32ad9e3-9f0b-448c-819a-081cae62e40f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.359790] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] The volume has not been displaced from its original location: [datastore1] volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3/volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3.vmdk. No consolidation needed. 
{{(pid=62875) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2661.365198] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfiguring VM instance instance-00000066 to detach disk 2002 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2661.365663] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea96833c-00a0-453c-9e92-c519c2e0a19a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.385357] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2661.385357] env[62875]: value = "task-2180783" [ 2661.385357] env[62875]: _type = "Task" [ 2661.385357] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2661.392511] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180783, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2661.706211] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2661.822450] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2661.840023] env[62875]: INFO nova.scheduler.client.report [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Deleted allocations for instance 894b9113-47ae-4b50-ae42-682be81324ba [ 2661.895604] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180783, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2662.347214] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d2017353-69b8-44f3-8318-365b34839c20 tempest-ServerRescueTestJSON-1904670578 tempest-ServerRescueTestJSON-1904670578-project-member] Lock "894b9113-47ae-4b50-ae42-682be81324ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.911s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2662.396495] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180783, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2662.898579] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180783, 'name': ReconfigVM_Task, 'duration_secs': 1.224112} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2662.898865] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Reconfigured VM instance instance-00000066 to detach disk 2002 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2662.903836] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07aebd5e-ba5a-4e38-b69f-44ea3e986590 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2662.920052] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2662.920052] env[62875]: value = "task-2180784" [ 2662.920052] env[62875]: _type = "Task" [ 2662.920052] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2662.927918] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180784, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2663.435174] env[62875]: DEBUG oslo_vmware.api [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180784, 'name': ReconfigVM_Task, 'duration_secs': 0.130434} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2663.435606] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445029', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'name': 'volume-3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '467b53e0-1614-4624-841d-1310271825bc', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3', 'serial': '3fb7f226-59cc-4e3e-a1b3-83cd74dff5a3'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2663.993383] env[62875]: DEBUG nova.objects.instance [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lazy-loading 'flavor' on Instance uuid 467b53e0-1614-4624-841d-1310271825bc {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2665.003783] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8aebcc27-0d78-477b-9d79-18403d585772 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 4.249s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2666.180763] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2666.181198] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2666.181470] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "467b53e0-1614-4624-841d-1310271825bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2666.181674] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2666.181848] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2666.183802] env[62875]: INFO nova.compute.manager [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Terminating instance [ 2666.687867] env[62875]: DEBUG nova.compute.manager [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2666.688163] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2666.689071] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cb16be-797a-47f6-a847-4b073fe88218 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2666.697952] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2666.698188] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83b6d31d-de29-4464-8c18-6070195413ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2666.704333] env[62875]: DEBUG oslo_vmware.api [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2666.704333] env[62875]: value = "task-2180785" [ 2666.704333] env[62875]: _type = "Task" [ 2666.704333] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2666.711732] env[62875]: DEBUG oslo_vmware.api [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180785, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2667.214687] env[62875]: DEBUG oslo_vmware.api [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180785, 'name': PowerOffVM_Task, 'duration_secs': 0.18048} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2667.215047] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2667.215147] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2667.215390] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d522f71e-c9f0-4b8c-b4b1-f4f5c8236ead {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.392888] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2667.393120] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2667.393306] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Deleting the datastore file [datastore2] 467b53e0-1614-4624-841d-1310271825bc {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2667.393569] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4785dce8-1dc7-41b1-baaf-a7820fb23a0e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.400194] env[62875]: DEBUG oslo_vmware.api [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for the task: (returnval){ [ 2667.400194] env[62875]: value = "task-2180787" [ 2667.400194] env[62875]: _type = "Task" [ 2667.400194] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2667.407448] env[62875]: DEBUG oslo_vmware.api [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180787, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2667.910064] env[62875]: DEBUG oslo_vmware.api [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Task: {'id': task-2180787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132444} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2667.910322] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2667.910517] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2667.910704] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2667.910891] env[62875]: INFO nova.compute.manager [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] [instance: 467b53e0-1614-4624-841d-1310271825bc] Took 1.22 seconds to destroy the instance on the hypervisor. [ 2667.911152] env[62875]: DEBUG oslo.service.loopingcall [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2667.911340] env[62875]: DEBUG nova.compute.manager [-] [instance: 467b53e0-1614-4624-841d-1310271825bc] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2667.911442] env[62875]: DEBUG nova.network.neutron [-] [instance: 467b53e0-1614-4624-841d-1310271825bc] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2668.365183] env[62875]: DEBUG nova.compute.manager [req-5257542c-90c4-4b33-b6b1-dff8185b16c8 req-8aa56254-78c4-4672-a698-9fad7e62dfc5 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Received event network-vif-deleted-70b54fc5-7469-4d04-87af-5fe0aaca4e9e {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2668.365420] env[62875]: INFO nova.compute.manager [req-5257542c-90c4-4b33-b6b1-dff8185b16c8 req-8aa56254-78c4-4672-a698-9fad7e62dfc5 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Neutron deleted interface 70b54fc5-7469-4d04-87af-5fe0aaca4e9e; detaching it from the instance and deleting it from the info cache [ 2668.365420] env[62875]: DEBUG nova.network.neutron [req-5257542c-90c4-4b33-b6b1-dff8185b16c8 req-8aa56254-78c4-4672-a698-9fad7e62dfc5 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2668.848456] env[62875]: DEBUG nova.network.neutron [-] [instance: 467b53e0-1614-4624-841d-1310271825bc] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2668.867909] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-71b43333-486d-40d4-8e41-2610ffdc4300 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2668.877554] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9af1de-061a-4db5-9dae-0d4c692c7804 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2668.901412] env[62875]: DEBUG nova.compute.manager [req-5257542c-90c4-4b33-b6b1-dff8185b16c8 req-8aa56254-78c4-4672-a698-9fad7e62dfc5 service nova] [instance: 467b53e0-1614-4624-841d-1310271825bc] Detach interface failed, port_id=70b54fc5-7469-4d04-87af-5fe0aaca4e9e, reason: Instance 467b53e0-1614-4624-841d-1310271825bc could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2669.351175] env[62875]: INFO nova.compute.manager [-] [instance: 467b53e0-1614-4624-841d-1310271825bc] Took 1.44 seconds to deallocate network for instance. 
[ 2669.857669] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2669.858074] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2669.858189] env[62875]: DEBUG nova.objects.instance [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lazy-loading 'resources' on Instance uuid 467b53e0-1614-4624-841d-1310271825bc {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2670.410654] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674e19e8-75a5-491e-9840-80a86f40b9a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.419454] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19340c3-c4c3-413b-b28d-77cabeff82f3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.448877] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62381a35-4269-4431-94ff-6c598e63937e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.455937] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da894177-fa30-499c-968c-b1833ec84afe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.468960] env[62875]: DEBUG nova.compute.provider_tree [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2670.999559] env[62875]: DEBUG nova.scheduler.client.report [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2670.999873] env[62875]: DEBUG nova.compute.provider_tree [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 130 to 131 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2671.000008] env[62875]: DEBUG nova.compute.provider_tree [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2671.504583] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.646s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2671.526215] env[62875]: INFO nova.scheduler.client.report [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Deleted allocations for instance 467b53e0-1614-4624-841d-1310271825bc [ 2672.035512] env[62875]: DEBUG oslo_concurrency.lockutils [None req-894b0715-0d55-4b8e-838d-32248eb3c923 tempest-AttachVolumeTestJSON-271302038 tempest-AttachVolumeTestJSON-271302038-project-member] Lock "467b53e0-1614-4624-841d-1310271825bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.854s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2692.841718] env[62875]: INFO nova.compute.manager [None req-10beb083-4687-4a54-b912-4d41e4dfe816 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Get console output [ 2692.842037] env[62875]: WARNING nova.virt.vmwareapi.driver [None req-10beb083-4687-4a54-b912-4d41e4dfe816 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] The console log is missing.
Check your VSPC configuration [ 2693.209832] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2693.209989] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2694.046456] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "a9cc9da5-b40c-492d-92a5-85e760290be9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2694.046826] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2694.046931] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "a9cc9da5-b40c-492d-92a5-85e760290be9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2694.047132] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2694.047349] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2694.050481] env[62875]: INFO nova.compute.manager [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Terminating instance [ 2694.554140] env[62875]: DEBUG nova.compute.manager [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Start destroying the instance on the hypervisor.
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2694.554390] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2694.555332] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34bdfb9-b450-46c1-9e0e-775aa7db74fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2694.563243] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2694.563465] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-259d1297-991c-4135-a837-2529d4e567de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2694.570067] env[62875]: DEBUG oslo_vmware.api [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2694.570067] env[62875]: value = "task-2180790" [ 2694.570067] env[62875]: _type = "Task" [ 2694.570067] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2694.577532] env[62875]: DEBUG oslo_vmware.api [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180790, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2694.706406] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2695.079907] env[62875]: DEBUG oslo_vmware.api [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180790, 'name': PowerOffVM_Task, 'duration_secs': 0.17745} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2695.080243] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2695.080392] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2695.080590] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd687d4c-c0f7-401c-8aa6-3d1670ebb37f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2696.701264] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2696.705873] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2697.706169] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2700.707773] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2702.499048] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2702.499407] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Deleting contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2702.499407] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleting the datastore file [datastore1] a9cc9da5-b40c-492d-92a5-85e760290be9 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2702.499534] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-c76ce416-d34d-4e45-9484-c25c803e4496 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.507562] env[62875]: DEBUG oslo_vmware.api [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2702.507562] env[62875]: value = "task-2180792" [ 2702.507562] env[62875]: _type = "Task" [ 2702.507562] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2702.514920] env[62875]: DEBUG oslo_vmware.api [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180792, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2702.706978] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2703.017508] env[62875]: DEBUG oslo_vmware.api [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140731} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2703.017726] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2703.017909] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Deleted contents of the VM from datastore datastore1 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2703.018100] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2703.018282] env[62875]: INFO nova.compute.manager [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Took 8.46 seconds to destroy the instance on the hypervisor. [ 2703.018519] env[62875]: DEBUG oslo.service.loopingcall [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2703.018702] env[62875]: DEBUG nova.compute.manager [-] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2703.018805] env[62875]: DEBUG nova.network.neutron [-] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2703.242105] env[62875]: DEBUG nova.compute.manager [req-389bc9a3-98c7-4807-8e94-dc15f1a21442 req-80fda050-97a4-4f24-a8ae-c3f4398e51b0 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Received event network-vif-deleted-6ca62ab4-73e8-4b45-b05b-6e807a8a2515 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2703.242318] env[62875]: INFO nova.compute.manager [req-389bc9a3-98c7-4807-8e94-dc15f1a21442 req-80fda050-97a4-4f24-a8ae-c3f4398e51b0 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Neutron deleted interface 6ca62ab4-73e8-4b45-b05b-6e807a8a2515; detaching it from the instance and deleting it from the info cache [ 2703.242502] env[62875]: DEBUG nova.network.neutron [req-389bc9a3-98c7-4807-8e94-dc15f1a21442 req-80fda050-97a4-4f24-a8ae-c3f4398e51b0 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2703.727322] env[62875]: DEBUG nova.network.neutron [-] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2703.744957] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30966b78-849d-4087-8938-4d23064b0a87 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.755028] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e162e52-dd6a-46ec-80d9-a4dce4ef5626 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.777712] env[62875]: DEBUG nova.compute.manager [req-389bc9a3-98c7-4807-8e94-dc15f1a21442 req-80fda050-97a4-4f24-a8ae-c3f4398e51b0 service nova] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Detach interface failed, port_id=6ca62ab4-73e8-4b45-b05b-6e807a8a2515, reason: Instance a9cc9da5-b40c-492d-92a5-85e760290be9 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2704.230115] env[62875]: INFO nova.compute.manager [-] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Took 1.21 seconds to deallocate network for instance. 
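The PowerOffVM_Task and DeleteDatastoreFile_Task records above follow the same wait_for_task shape: submit the vCenter task, then poll its TaskInfo on a fixed interval, logging progress on each poll and duration_secs once it completes. A rough sketch of such a poll loop, assuming a get_task_info callable that returns an object with vSphere-style state/error fields (an illustration of the pattern, not oslo_vmware's implementation):

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    # Poll until the task leaves the 'queued'/'running' states, mirroring the
    # "progress is 0%" ... "completed successfully" pairs in the log above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(getattr(info, "error", "task failed"))
        time.sleep(interval)  # still queued or running; poll again
    raise TimeoutError("task did not complete within %.0fs" % timeout)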
[ 2704.737559] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2704.737903] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2704.738063] env[62875]: DEBUG nova.objects.instance [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lazy-loading 'resources' on Instance uuid a9cc9da5-b40c-492d-92a5-85e760290be9 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2705.420777] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef774f6-03c5-494d-aa49-62a39d970123 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2705.428521] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4095c48c-2bcc-4db1-b6a8-24fa8d900255 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2705.457657] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4902b190-655f-4f79-bded-a8c7a8a9cc24 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2705.465324] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07ed177-4ffc-49a3-a1f5-dea0afcdee18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2705.479177] env[62875]: DEBUG nova.compute.provider_tree [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2705.982505] env[62875]: DEBUG nova.scheduler.client.report [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2706.487428] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a 
tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.749s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2706.507912] env[62875]: INFO nova.scheduler.client.report [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleted allocations for instance a9cc9da5-b40c-492d-92a5-85e760290be9 [ 2707.015845] env[62875]: DEBUG oslo_concurrency.lockutils [None req-dfeb4197-9fd3-4a00-a851-b1225033e35a tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a9cc9da5-b40c-492d-92a5-85e760290be9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 12.969s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2707.227446] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2707.227741] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2707.227960] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2707.228166] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2707.228356] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2707.230695] env[62875]: INFO nova.compute.manager [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156
tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Terminating instance [ 2707.705897] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2707.706088] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2707.706220] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 2707.734501] env[62875]: DEBUG nova.compute.manager [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Start destroying the instance on the hypervisor. {{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2707.734797] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2707.735694] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b41f65-bdfa-42c3-a042-73fd84bc0c25 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.743515] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2707.743728] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0221900b-ea59-4f66-bfcd-7c54b3784deb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.750058] env[62875]: DEBUG oslo_vmware.api [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2707.750058] env[62875]: value = "task-2180793" [ 2707.750058] env[62875]: _type = "Task" [ 2707.750058] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2707.757555] env[62875]: DEBUG oslo_vmware.api [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2708.209951] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Skipping network cache update for instance because it is being deleted. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 2708.210307] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 2708.210421] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2708.259995] env[62875]: DEBUG oslo_vmware.api [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180793, 'name': PowerOffVM_Task, 'duration_secs': 0.176368} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2708.260654] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2708.260654] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2708.260654] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce60665e-a436-4db3-a461-87c3c8c30cb5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.495788] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2708.496035] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2708.496211] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleting the datastore file [datastore2] a5f9b278-6c02-4d5e-997a-97a8fa8944ca {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2708.496470] env[62875]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f747eda0-a224-46d7-bf64-83353fe680b3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.502901] env[62875]: DEBUG oslo_vmware.api [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for the task: (returnval){ [ 2708.502901] env[62875]: value = "task-2180795" [ 2708.502901] env[62875]: _type = "Task" [ 2708.502901] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2708.509928] env[62875]: DEBUG oslo_vmware.api [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180795, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2708.713326] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2708.713609] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2708.713798] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2708.713980] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2708.714856] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ef2580-ddea-4145-90d1-1d9c200dfa7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.724199] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cec1f95-28c4-4ed6-a7eb-56dff1bbf88b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.738162] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a137dc71-c46e-4c29-a0c0-fc3bc5881dff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.744426] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f51d56-a3d7-4b6b-96c2-ff969dc06c88 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.772752] env[62875]: DEBUG 
nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180076MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2708.772892] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2708.773097] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2709.013240] env[62875]: DEBUG oslo_vmware.api [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Task: {'id': task-2180795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136969} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2709.013516] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2709.013701] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2709.013875] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2709.014062] env[62875]: INFO nova.compute.manager [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Took 1.28 seconds to destroy the instance on the hypervisor. [ 2709.014306] env[62875]: DEBUG oslo.service.loopingcall [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2709.014537] env[62875]: DEBUG nova.compute.manager [-] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2709.014664] env[62875]: DEBUG nova.network.neutron [-] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2709.468657] env[62875]: DEBUG nova.compute.manager [req-ed13424f-02fa-4704-a0ab-1d8cefaeb9f9 req-c231d23e-ed3c-4e3a-9655-c7a9af3b665b service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Received event network-vif-deleted-473d8504-6b5c-40ab-becc-e8cc4c9ea748 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2709.468913] env[62875]: INFO nova.compute.manager [req-ed13424f-02fa-4704-a0ab-1d8cefaeb9f9 req-c231d23e-ed3c-4e3a-9655-c7a9af3b665b service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Neutron deleted interface 473d8504-6b5c-40ab-becc-e8cc4c9ea748; detaching it from the instance and deleting it from the info cache [ 2709.469037] env[62875]: DEBUG nova.network.neutron [req-ed13424f-02fa-4704-a0ab-1d8cefaeb9f9 req-c231d23e-ed3c-4e3a-9655-c7a9af3b665b service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2709.798079] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2709.798323] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2709.798449] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2709.825981] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefa871b-90cf-487e-88fc-d46637ed9d41 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2709.835275] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f047cf-f42c-4426-9cc9-c2af530b6431 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2709.865184] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f64dd99-849d-435f-ac7b-f9a5d95b4f40 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2709.872684] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bae071-c2e7-474c-93c1-6fbc383d6802 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2709.885627] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2709.952149] env[62875]: DEBUG nova.network.neutron [-] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2709.972055] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb55023b-99b2-4238-95f6-04c1b032ae30 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2709.981396] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bc8cc3-15c5-4ef9-af32-9c9b909d603f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2710.003490] env[62875]: DEBUG nova.compute.manager [req-ed13424f-02fa-4704-a0ab-1d8cefaeb9f9 req-c231d23e-ed3c-4e3a-9655-c7a9af3b665b service nova] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Detach interface failed, port_id=473d8504-6b5c-40ab-becc-e8cc4c9ea748, reason: Instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca could not be found. 
{{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2710.388559] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2710.455033] env[62875]: INFO nova.compute.manager [-] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Took 1.44 seconds to deallocate network for instance. [ 2710.893827] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2710.894231] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.121s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2710.961435] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2710.961697] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2710.961917] env[62875]: DEBUG nova.objects.instance [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lazy-loading 'resources' on Instance uuid a5f9b278-6c02-4d5e-997a-97a8fa8944ca {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2711.490284] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a43fd5-8a80-4a9d-9497-1a40d8163266 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2711.497965] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05342b36-aa06-4194-9b54-c85b4adac5b3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2711.527419] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fff560-330a-42ae-a764-4f176c37fb19 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2711.535012] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37ff076-4b3c-4be8-bae4-fbf3fd181966 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2711.548206] env[62875]: DEBUG nova.compute.provider_tree [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2712.050880] env[62875]: DEBUG nova.scheduler.client.report [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2712.555623] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.594s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2712.573520] env[62875]: INFO nova.scheduler.client.report [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Deleted allocations for instance a5f9b278-6c02-4d5e-997a-97a8fa8944ca [ 2713.083626] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8c1ec45c-2561-4be8-b545-305ea1d6e907 tempest-AttachInterfacesTestJSON-699051156 tempest-AttachInterfacesTestJSON-699051156-project-member] Lock "a5f9b278-6c02-4d5e-997a-97a8fa8944ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.856s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2719.395899] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2719.396151] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2719.889888] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2719.898990] env[62875]: DEBUG nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2720.418784] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2720.419097] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2720.420685] env[62875]: INFO nova.compute.claims [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2721.456834] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263abafd-74d4-4a90-9c88-f8107b11f040 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.464609] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b9942d-6522-47bd-aa09-5fddc0afd0f5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.495452] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cced7d-abdb-4a24-8220-fe06594479e2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.502263] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d11279-bf0f-46f4-b369-f98085c552ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.515109] env[62875]: DEBUG nova.compute.provider_tree [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2722.019298] env[62875]: DEBUG nova.scheduler.client.report [None req-e63cab43-0daa-48e3-873d-497248a93188 
tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2722.523757] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.105s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2722.524444] env[62875]: DEBUG nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2723.029575] env[62875]: DEBUG nova.compute.utils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2723.030986] env[62875]: DEBUG nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Allocating IP information in the background. 
{{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2723.031166] env[62875]: DEBUG nova.network.neutron [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2723.072082] env[62875]: DEBUG nova.policy [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e57a483ae3d479e8fb30feb3e0b8310', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccc96aead000465a9613e6bb73d31721', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2723.331136] env[62875]: DEBUG nova.network.neutron [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Successfully created port: 5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2723.534484] env[62875]: DEBUG nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2724.545893] env[62875]: DEBUG nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Start spawning the instance on the hypervisor. 
{{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2724.570738] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2724.571021] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2724.571188] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2724.571372] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2724.571519] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2724.571666] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2724.571869] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2724.572050] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
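The hardware entries above, together with the "Got 1 possible topologies" entry just below, record nova.virt.hardware enumerating candidate CPU layouts for the 1-vCPU m1.nano flavor under unconstrained limits (logged as 0:0:0, with a 65536 ceiling per axis). What follows is a minimal, stdlib-only Python sketch of that enumeration; VirtCPUTopology and possible_topologies here are simplified stand-ins for illustration, not the real Nova API, and the exact-product filter is an assumption of this sketch.

from typing import NamedTuple

class VirtCPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) split whose product covers vcpus.

    Exact cover (s * c * t == vcpus) is a simplifying assumption of this sketch.
    """
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

# With the flavor logged here (m1.nano, 1 vCPU) and no flavor/image limits,
# only the 1:1:1 layout survives, matching the single topology reported below:
print(list(possible_topologies(1)))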
[ 2724.572255] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2724.572421] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2724.572598] env[62875]: DEBUG nova.virt.hardware [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2724.573489] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab68656-3403-4e2c-8555-427352293c39 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2724.581523] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359472b4-ed20-4094-b28b-0148a532ee62 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2724.690347] env[62875]: DEBUG nova.compute.manager [req-af735980-7c86-47ae-a732-9dc5a1f4f0ca req-ae30a864-42e2-4f09-8f69-8970952b7915 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-vif-plugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2724.690575] env[62875]: DEBUG oslo_concurrency.lockutils [req-af735980-7c86-47ae-a732-9dc5a1f4f0ca req-ae30a864-42e2-4f09-8f69-8970952b7915 service nova] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2724.690782] env[62875]: DEBUG oslo_concurrency.lockutils [req-af735980-7c86-47ae-a732-9dc5a1f4f0ca req-ae30a864-42e2-4f09-8f69-8970952b7915 service nova] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2724.690970] env[62875]: DEBUG oslo_concurrency.lockutils [req-af735980-7c86-47ae-a732-9dc5a1f4f0ca req-ae30a864-42e2-4f09-8f69-8970952b7915 service nova] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2724.691122] env[62875]: DEBUG nova.compute.manager [req-af735980-7c86-47ae-a732-9dc5a1f4f0ca req-ae30a864-42e2-4f09-8f69-8970952b7915 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] No waiting events found dispatching network-vif-plugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2724.691286] 
env[62875]: WARNING nova.compute.manager [req-af735980-7c86-47ae-a732-9dc5a1f4f0ca req-ae30a864-42e2-4f09-8f69-8970952b7915 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received unexpected event network-vif-plugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 for instance with vm_state building and task_state spawning. [ 2724.774859] env[62875]: DEBUG nova.network.neutron [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Successfully updated port: 5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2725.277784] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2725.277925] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2725.278094] env[62875]: DEBUG nova.network.neutron [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2725.809324] env[62875]: DEBUG nova.network.neutron [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Instance cache missing network info. 
{{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2725.927926] env[62875]: DEBUG nova.network.neutron [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2726.430882] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2726.431247] env[62875]: DEBUG nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Instance network_info: |[{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2726.431680] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:9b:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a51b325-2a9b-4c76-8da9-9fa0817d61e9', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2726.439119] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating folder: Project (ccc96aead000465a9613e6bb73d31721). Parent ref: group-v444854. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2726.439391] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ba0d6f2-5d3d-4206-a196-12b2a30e0870 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2726.453906] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created folder: Project (ccc96aead000465a9613e6bb73d31721) in parent group-v444854. [ 2726.454101] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating folder: Instances. Parent ref: group-v445030. {{(pid=62875) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2726.454329] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4eed85fd-197e-419a-9ea7-8dd7c2ecf97f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2726.463516] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created folder: Instances in parent group-v445030. [ 2726.463770] env[62875]: DEBUG oslo.service.loopingcall [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2726.463961] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2726.464166] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0639fa55-1221-46e7-ab7d-f68d696eeb1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2726.482426] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2726.482426] env[62875]: value = "task-2180798" [ 2726.482426] env[62875]: _type = "Task" [ 2726.482426] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2726.489438] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180798, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2726.717456] env[62875]: DEBUG nova.compute.manager [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2726.717716] env[62875]: DEBUG nova.compute.manager [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing instance network info cache due to event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2726.717959] env[62875]: DEBUG oslo_concurrency.lockutils [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2726.718069] env[62875]: DEBUG oslo_concurrency.lockutils [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2726.718234] env[62875]: DEBUG nova.network.neutron [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2726.992697] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180798, 'name': CreateVM_Task, 'duration_secs': 0.399706} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2726.993101] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2726.993532] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2726.993701] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2726.994034] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2726.994274] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdb3757a-ea2f-462b-a7dd-f85e4f6628ee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2726.998311] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2726.998311] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ee44aa-da0e-fb14-9285-e109b1d47668" [ 2726.998311] env[62875]: _type = "Task" [ 2726.998311] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2727.005459] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ee44aa-da0e-fb14-9285-e109b1d47668, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2727.397393] env[62875]: DEBUG nova.network.neutron [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updated VIF entry in instance network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2727.397789] env[62875]: DEBUG nova.network.neutron [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2727.508286] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ee44aa-da0e-fb14-9285-e109b1d47668, 'name': SearchDatastore_Task, 'duration_secs': 0.009503} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2727.508555] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2727.508792] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2727.509034] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2727.509186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2727.509368] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2727.509616] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86c0a238-47e1-4344-9b53-377b8292203a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2727.517406] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2727.517605] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2727.518305] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85470f7a-3c66-4a12-98c4-7c8081c35a1c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2727.523245] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2727.523245] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f96a9e-5e55-0aa2-8aa5-b1353416493d" [ 2727.523245] env[62875]: _type = "Task" [ 2727.523245] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2727.530712] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f96a9e-5e55-0aa2-8aa5-b1353416493d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2727.900397] env[62875]: DEBUG oslo_concurrency.lockutils [req-a6d7fa48-d1d7-4cfa-8a28-641a0b762a7c req-48ece4c6-257e-4a26-be84-0457c56c67b7 service nova] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2728.034131] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f96a9e-5e55-0aa2-8aa5-b1353416493d, 'name': SearchDatastore_Task, 'duration_secs': 0.008466} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2728.034895] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c00289-5b3d-4c12-9d8c-3bf4aa3b0e5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.040100] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2728.040100] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252db2d-7465-05f3-d4e6-01bae9d69892" [ 2728.040100] env[62875]: _type = "Task" [ 2728.040100] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2728.047173] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252db2d-7465-05f3-d4e6-01bae9d69892, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2728.551198] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5252db2d-7465-05f3-d4e6-01bae9d69892, 'name': SearchDatastore_Task, 'duration_secs': 0.00939} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2728.551450] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2728.551698] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2728.551947] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22a51692-4210-468a-a1f6-c65b798596e9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.558315] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2728.558315] env[62875]: value = "task-2180799" [ 2728.558315] env[62875]: _type = "Task" [ 2728.558315] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2728.565737] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2729.069117] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180799, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.413833} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2729.069117] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2729.069117] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2729.069117] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8fce4e8-8c1b-4eed-aeb7-e45b17df9c4f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2729.075246] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2729.075246] env[62875]: value = "task-2180800" [ 2729.075246] env[62875]: _type = "Task" [ 2729.075246] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2729.082421] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180800, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2729.585277] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05678} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2729.585550] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2729.586312] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c84d921-08cd-44aa-b8d8-b5b624e53c4c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2729.609834] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2729.610081] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2fcc36c-1751-48ca-a341-045becc1fe45 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2729.639082] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2729.639082] env[62875]: value = "task-2180801" [ 2729.639082] env[62875]: _type = "Task" [ 2729.639082] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2729.646558] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2730.149147] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180801, 'name': ReconfigVM_Task, 'duration_secs': 0.269904} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2730.149537] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2730.150044] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50324576-4849-40ec-877d-f80813c9df8e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2730.156206] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2730.156206] env[62875]: value = "task-2180802" [ 2730.156206] env[62875]: _type = "Task" [ 2730.156206] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2730.166454] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180802, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2730.666100] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180802, 'name': Rename_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2731.167519] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180802, 'name': Rename_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2731.668263] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180802, 'name': Rename_Task} progress is 99%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2732.168181] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180802, 'name': Rename_Task, 'duration_secs': 1.836177} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2732.168754] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2732.168754] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-665eb39e-0395-44bc-8c15-2ad336b4fc08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.174859] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2732.174859] env[62875]: value = "task-2180803" [ 2732.174859] env[62875]: _type = "Task" [ 2732.174859] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2732.182187] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2732.684520] env[62875]: DEBUG oslo_vmware.api [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180803, 'name': PowerOnVM_Task, 'duration_secs': 0.471314} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2732.684831] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2732.684991] env[62875]: INFO nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Took 8.14 seconds to spawn the instance on the hypervisor. 
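The whole spawn sequence above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) runs through the same pattern: submit a vCenter task, then poll it at a fixed interval, logging "progress is N%" on each poll until it completes. The real loop lives in oslo_vmware.api (wait_for_task / _poll_task); the stdlib-only sketch below only illustrates the pattern, and poll_fn plus its state dicts are hypothetical stand-ins, not the oslo.vmware interface.

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Poll poll_fn() until it reports success; raise on error or timeout.

    poll_fn is assumed to return dicts like {'state': 'running', 'progress': 42},
    {'state': 'success'}, or {'state': 'error', 'message': ...}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('message', 'task failed'))
        # Corresponds to the "progress is N%" DEBUG lines emitted on each poll.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError('task did not complete in time')

# Example: a fake task that finishes on the third poll, mirroring the
# Rename_Task sequence above (0% -> 99% -> completed successfully).
_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'running', 'progress': 99},
                {'state': 'success'}])
wait_for_task(lambda: next(_states), interval=0.01)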
[ 2732.685197] env[62875]: DEBUG nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2732.685991] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5858f24e-8476-444a-a36c-446b1dc41fe1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.206777] env[62875]: INFO nova.compute.manager [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Took 12.80 seconds to build instance. [ 2733.509357] env[62875]: DEBUG nova.compute.manager [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2733.509637] env[62875]: DEBUG nova.compute.manager [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing instance network info cache due to event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2733.509930] env[62875]: DEBUG oslo_concurrency.lockutils [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2733.510215] env[62875]: DEBUG oslo_concurrency.lockutils [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2733.510561] env[62875]: DEBUG nova.network.neutron [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2733.708797] env[62875]: DEBUG oslo_concurrency.lockutils [None req-e63cab43-0daa-48e3-873d-497248a93188 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.312s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2734.266436] env[62875]: DEBUG nova.network.neutron [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updated VIF entry in instance network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2734.266782] env[62875]: DEBUG nova.network.neutron [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2734.770123] env[62875]: DEBUG oslo_concurrency.lockutils [req-d2f808b9-7992-4a00-bf04-aefb318c1f99 req-4b48b0eb-bb41-4ad9-8596-08d141435484 service nova] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2754.706589] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2754.706985] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2756.707070] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2757.706419] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2757.706655] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2758.702365] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2761.708862] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2764.706829] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2767.706263] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2767.706683] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2767.706683] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 2768.237687] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2768.237834] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2768.238049] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2768.238223] env[62875]: 
DEBUG nova.objects.instance [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lazy-loading 'info_cache' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2769.951322] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2770.454294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2770.454521] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2770.454757] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2770.958331] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2770.958721] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2770.958721] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2770.958926] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2770.959867] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a53285e-78d6-4441-a285-672f3fdddd76 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2770.969287] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d189b0-3ba8-456a-862e-d7ebd996bd07 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2770.983661] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a958fd6a-5990-4d29-a7b0-09bf086065bd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2770.990179] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5b7b06-fdec-499c-88dd-1fcd81939cf0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2771.018924] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180825MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2771.019073] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2771.019292] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2772.044466] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2772.044714] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2772.044823] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2772.071161] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf951311-edde-4686-8f0a-51057dd6c717 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.079284] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4691c5-6c19-43dd-a26e-386aaa36e7dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.111094] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be30620b-580a-437b-9392-163bf3557c3c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.118251] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0383e47-98b2-4eed-9c00-c5b4729d3b7c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.131667] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2772.634759] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2772.807526] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2772.807726] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" acquired by 
"nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2772.807905] env[62875]: INFO nova.compute.manager [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Shelving [ 2773.139290] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2773.139728] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.120s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2773.816938] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2773.817251] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34464288-e738-48ed-b1c0-63c7d16ae869 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.824716] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2773.824716] env[62875]: value = "task-2180804" [ 2773.824716] env[62875]: _type = "Task" [ 2773.824716] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2773.832440] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180804, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2774.334640] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180804, 'name': PowerOffVM_Task, 'duration_secs': 0.168436} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2774.335049] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2774.335712] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed96f1f-7b44-43f9-9871-ff25b59490d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2774.355843] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24262b4-31c2-42f1-b67d-7f153d6d9d76 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2774.866750] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2774.867114] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2b0f8db1-9669-4225-b8fc-380fc3e04177 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2774.875868] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2774.875868] env[62875]: value = "task-2180805" [ 2774.875868] env[62875]: _type = "Task" [ 2774.875868] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2774.883862] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180805, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2775.386328] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180805, 'name': CreateSnapshot_Task, 'duration_secs': 0.399608} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2775.386723] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2775.387513] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf44b99b-6b18-4ac9-b14f-1bc69dc7d2ea {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2775.904831] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2775.905146] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c182d561-eeb0-44cf-b5fb-0679109d97cb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2775.913547] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2775.913547] env[62875]: value = "task-2180806" [ 2775.913547] env[62875]: _type = "Task" [ 2775.913547] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2775.921332] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180806, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2776.423215] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180806, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2776.923954] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180806, 'name': CloneVM_Task} progress is 95%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2777.424153] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180806, 'name': CloneVM_Task, 'duration_secs': 1.10342} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2777.424576] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Created linked-clone VM from snapshot [ 2777.425125] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb837a6-6179-4204-97b2-ba567c3936bd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.432097] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Uploading image c28dfddf-e555-419c-86a5-ca1247af5c36 {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2777.451850] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2777.451850] env[62875]: value = "vm-445034" [ 2777.451850] env[62875]: _type = "VirtualMachine" [ 2777.451850] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2777.452103] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-97622f11-683c-4217-a403-0280646a8487 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.458747] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease: (returnval){ [ 2777.458747] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52805c86-446b-642a-386f-b9f2b342a7b8" [ 2777.458747] env[62875]: _type = "HttpNfcLease" [ 2777.458747] env[62875]: } obtained for exporting VM: (result){ [ 2777.458747] env[62875]: value = "vm-445034" [ 2777.458747] env[62875]: _type = "VirtualMachine" [ 2777.458747] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2777.458975] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the lease: (returnval){ [ 2777.458975] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52805c86-446b-642a-386f-b9f2b342a7b8" [ 2777.458975] env[62875]: _type = "HttpNfcLease" [ 2777.458975] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2777.464718] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2777.464718] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52805c86-446b-642a-386f-b9f2b342a7b8" [ 2777.464718] env[62875]: _type = "HttpNfcLease" [ 2777.464718] env[62875]: } is initializing. 
{{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2777.967311] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2777.967311] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52805c86-446b-642a-386f-b9f2b342a7b8" [ 2777.967311] env[62875]: _type = "HttpNfcLease" [ 2777.967311] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2777.967604] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2777.967604] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52805c86-446b-642a-386f-b9f2b342a7b8" [ 2777.967604] env[62875]: _type = "HttpNfcLease" [ 2777.967604] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2777.968363] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3da0f82-834b-451b-9e41-ca1cb75d856e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.975304] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f82b8-f901-228e-bac9-c8467aa76fa5/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2777.975476] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f82b8-f901-228e-bac9-c8467aa76fa5/disk-0.vmdk for reading. {{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2778.060386] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4a0fe59c-87d8-4461-8f4c-715481b050aa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2784.902341] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f82b8-f901-228e-bac9-c8467aa76fa5/disk-0.vmdk. 
{{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2784.903322] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeca5dba-abf5-4393-b5ba-b6bcbcea2c49 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2784.909617] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f82b8-f901-228e-bac9-c8467aa76fa5/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2784.909782] env[62875]: ERROR oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f82b8-f901-228e-bac9-c8467aa76fa5/disk-0.vmdk due to incomplete transfer. [ 2784.909989] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-96db4bb7-83a3-4f8b-b9b5-4c66b5d22990 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2784.917385] env[62875]: DEBUG oslo_vmware.rw_handles [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f82b8-f901-228e-bac9-c8467aa76fa5/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2784.917576] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Uploaded image c28dfddf-e555-419c-86a5-ca1247af5c36 to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2784.919919] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2784.920148] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9af98545-6fe1-41cb-b08c-5432d232e6e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2784.925631] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2784.925631] env[62875]: value = "task-2180808" [ 2784.925631] env[62875]: _type = "Task" [ 2784.925631] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2784.934264] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180808, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2785.435677] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180808, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2785.936165] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180808, 'name': Destroy_Task, 'duration_secs': 0.557326} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2785.936533] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Destroyed the VM [ 2785.936660] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2785.936893] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7cb2f376-a292-435e-a109-6cbfbc4dd1e3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2785.943037] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2785.943037] env[62875]: value = "task-2180809" [ 2785.943037] env[62875]: _type = "Task" [ 2785.943037] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2785.950991] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180809, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2786.453536] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180809, 'name': RemoveSnapshot_Task, 'duration_secs': 0.375386} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2786.453781] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2786.454075] env[62875]: DEBUG nova.compute.manager [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2786.454810] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ebe0af-af30-4e71-a9c1-3efa7a56f6ee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2786.966975] env[62875]: INFO nova.compute.manager [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Shelve offloading [ 2787.470796] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2787.471113] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dc233e0-2d30-40dc-a822-47b7cf7dfe7a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2787.479322] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2787.479322] env[62875]: value = "task-2180810" [ 2787.479322] env[62875]: _type = "Task" [ 2787.479322] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2787.487416] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] VM already powered off {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2787.487612] env[62875]: DEBUG nova.compute.manager [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2787.488328] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec6ab38-789f-4566-bacd-54f7d2af9925 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2787.493832] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2787.494010] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2787.494184] env[62875]: DEBUG nova.network.neutron [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2788.201993] env[62875]: DEBUG nova.network.neutron [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2788.705071] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2789.196835] env[62875]: DEBUG nova.compute.manager [req-79695b4b-d9a4-4989-905a-aeee8e373f1c req-1193dd43-234c-4574-8b34-97de69d65895 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-vif-unplugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2789.197050] env[62875]: DEBUG oslo_concurrency.lockutils [req-79695b4b-d9a4-4989-905a-aeee8e373f1c req-1193dd43-234c-4574-8b34-97de69d65895 service nova] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2789.197268] env[62875]: DEBUG oslo_concurrency.lockutils [req-79695b4b-d9a4-4989-905a-aeee8e373f1c req-1193dd43-234c-4574-8b34-97de69d65895 service nova] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2789.197480] env[62875]: DEBUG oslo_concurrency.lockutils [req-79695b4b-d9a4-4989-905a-aeee8e373f1c req-1193dd43-234c-4574-8b34-97de69d65895 service nova] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2789.197663] env[62875]: DEBUG nova.compute.manager [req-79695b4b-d9a4-4989-905a-aeee8e373f1c req-1193dd43-234c-4574-8b34-97de69d65895 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] No waiting events found dispatching network-vif-unplugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2789.197833] env[62875]: WARNING nova.compute.manager [req-79695b4b-d9a4-4989-905a-aeee8e373f1c req-1193dd43-234c-4574-8b34-97de69d65895 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received unexpected event network-vif-unplugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2789.271703] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2789.272622] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1e4ac0-0283-4581-95d4-1787d7797119 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2789.280500] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2789.280712] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dcca087-a3d2-47fd-af71-6bf2dfd99f68 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2791.222716] env[62875]: DEBUG nova.compute.manager [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2791.222890] env[62875]: DEBUG nova.compute.manager [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing instance network info cache due to event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2791.223091] env[62875]: DEBUG oslo_concurrency.lockutils [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2791.223243] env[62875]: DEBUG oslo_concurrency.lockutils [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2791.223404] env[62875]: DEBUG nova.network.neutron [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2791.914658] env[62875]: DEBUG nova.network.neutron [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updated VIF entry in instance network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2791.915031] env[62875]: DEBUG nova.network.neutron [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5a51b325-2a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2792.418235] env[62875]: DEBUG oslo_concurrency.lockutils [req-ef88973d-c969-4cd8-a7de-aefdba070776 req-0e5cffc7-c32a-4754-baf1-aa075760a137 service nova] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2797.124934] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2797.125338] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2797.125338] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleting the datastore file [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2797.125539] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51adeef2-ef30-478b-9aa5-5ff5e9e9b963 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2797.132427] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2797.132427] env[62875]: value = "task-2180812" [ 2797.132427] env[62875]: _type = "Task" [ 2797.132427] env[62875]: } to 
complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2797.140378] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180812, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2797.642933] env[62875]: DEBUG oslo_vmware.api [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180812, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124385} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2797.643188] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2797.643379] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2797.643551] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2797.667150] env[62875]: INFO nova.scheduler.client.report [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted allocations for instance 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 [ 2798.172111] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2798.172419] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2798.172650] env[62875]: DEBUG nova.objects.instance [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'resources' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2798.675775] env[62875]: DEBUG nova.objects.instance [None 
req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'numa_topology' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2799.178286] env[62875]: DEBUG nova.objects.base [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Object Instance<2e1129fe-f32c-4868-bc68-a39ea14fe9d2> lazy-loaded attributes: resources,numa_topology {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2799.208398] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd1f919-62d2-409c-9ba9-687e5883eabf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.215735] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b729191-2c73-44be-bd89-d1ab1ac06e7d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.248010] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2523df6-57da-4340-8dc9-12d28c65df0f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.255170] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504bd69b-03f3-4b68-ba18-d2f80a3b7acc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.268195] env[62875]: DEBUG nova.compute.provider_tree [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2799.445835] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2799.771236] env[62875]: DEBUG nova.scheduler.client.report [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2800.276607] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 
tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2800.785567] env[62875]: DEBUG oslo_concurrency.lockutils [None req-9f3c2b81-2e15-4875-8cba-b13afdeccb21 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.978s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2800.786445] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.341s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2800.786631] env[62875]: INFO nova.compute.manager [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Unshelving [ 2801.795887] env[62875]: DEBUG nova.compute.utils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2802.298970] env[62875]: INFO nova.virt.block_device [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Booting with volume 8e7af612-749b-4036-8ba9-d4ba902716aa at /dev/sdb [ 2802.333562] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9c56a76-d92d-4c93-a78f-8072ded0ad09 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2802.343265] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c6beed-6d70-46ce-9f5d-676bb0996249 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2802.365375] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c75fa9c-a87a-48ab-bf43-ea183fc41fdf {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2802.373303] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f90c53-d8e4-4de6-9435-b16ad5f02f96 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2802.395376] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f002ec-9786-4397-aa33-c11d4f1f0d0a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2802.401255] env[62875]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e085a2e-cb5b-4964-9f35-db8cf78e51a0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2802.413808] env[62875]: DEBUG nova.virt.block_device [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating existing volume attachment record: 89c3b316-e682-45b5-9f2c-ebfe6aa0e6f1 {{(pid=62875) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2808.005307] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2808.005569] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2808.005777] env[62875]: DEBUG nova.objects.instance [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'pci_requests' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2808.509665] env[62875]: DEBUG nova.objects.instance [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'numa_topology' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2809.012064] env[62875]: INFO nova.compute.claims [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2810.045931] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ac4d20-bbcf-45b0-bab7-1a3296fac2b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.053544] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805b403f-5d81-4e6c-a67d-8441567339db {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.083684] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931e0599-887a-4878-89e4-6c57a0b05c31 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.090155] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992cc95b-8744-4529-a665-217abaef9a91 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2810.102723] env[62875]: DEBUG nova.compute.provider_tree [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2810.606186] env[62875]: DEBUG nova.scheduler.client.report [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2811.111031] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.105s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2811.140770] env[62875]: INFO nova.network.neutron [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'}
[ 2816.071229] env[62875]: DEBUG nova.compute.manager [req-82958847-7ea4-4615-bc79-66f5729861f2 req-59e806e2-66c7-4754-a839-afa6901d6f11 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-vif-plugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2816.071481] env[62875]: DEBUG oslo_concurrency.lockutils [req-82958847-7ea4-4615-bc79-66f5729861f2 req-59e806e2-66c7-4754-a839-afa6901d6f11 service nova] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2816.071703] env[62875]: DEBUG oslo_concurrency.lockutils [req-82958847-7ea4-4615-bc79-66f5729861f2 req-59e806e2-66c7-4754-a839-afa6901d6f11 service nova] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2816.071872] env[62875]: DEBUG oslo_concurrency.lockutils [req-82958847-7ea4-4615-bc79-66f5729861f2 req-59e806e2-66c7-4754-a839-afa6901d6f11 service nova] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62875) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2816.072063] env[62875]: DEBUG nova.compute.manager [req-82958847-7ea4-4615-bc79-66f5729861f2 req-59e806e2-66c7-4754-a839-afa6901d6f11 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] No waiting events found dispatching network-vif-plugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2816.072190] env[62875]: WARNING nova.compute.manager [req-82958847-7ea4-4615-bc79-66f5729861f2 req-59e806e2-66c7-4754-a839-afa6901d6f11 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received unexpected event network-vif-plugged-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 for instance with vm_state shelved_offloaded and task_state spawning. [ 2816.157253] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2816.157529] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2816.157724] env[62875]: DEBUG nova.network.neutron [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2816.855491] env[62875]: DEBUG nova.network.neutron [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2817.358690] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2817.384960] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c59da42fd77e628ed34b789c1eb990d3',container_format='bare',created_at=2025-01-25T05:27:17Z,direct_url=,disk_format='vmdk',id=c28dfddf-e555-419c-86a5-ca1247af5c36,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1701488975-shelved',owner='ccc96aead000465a9613e6bb73d31721',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-01-25T05:27:29Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2817.385230] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2817.385388] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2817.385567] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2817.385714] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2817.385861] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2817.386079] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2817.386241] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2817.386412] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2817.386609] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2817.386794] env[62875]: DEBUG nova.virt.hardware [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2817.387693] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf04e8ab-c3d5-4805-9e61-4ecd14651fd5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.397330] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a001f564-83a9-44b6-abec-a72b66cde820 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.417505] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:9b:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a51b325-2a9b-4c76-8da9-9fa0817d61e9', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2817.430257] env[62875]: DEBUG oslo.service.loopingcall [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
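
The nova.virt.hardware entries above show the topology search for the m1.nano flavor: Nova enumerates every (sockets, cores, threads) split whose product equals the flavor's vCPU count and that fits within the 65536 per-dimension limits, which for vcpus=1 leaves exactly one candidate. A minimal sketch of that enumeration, illustrative only and not Nova's actual implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    # For vcpus=1 this yields only (1, 1, 1), matching the
    # "Got 1 possible topologies" line above.
    print(list(possible_topologies(1)))
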
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2817.430581] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2817.430881] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fec6b8d-9c35-401a-b8b7-7b0211649cc0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.453010] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2817.453010] env[62875]: value = "task-2180817" [ 2817.453010] env[62875]: _type = "Task" [ 2817.453010] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2817.460121] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180817, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2817.962526] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180817, 'name': CreateVM_Task, 'duration_secs': 0.376693} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2817.962715] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2817.963453] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2817.963675] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2817.964114] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2817.964405] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-288c8004-5654-4b0e-bc62-63925793ed12 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.968738] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2817.968738] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52debf34-03e9-1c5f-033c-37eb34401b52" [ 2817.968738] env[62875]: _type = "Task" [ 2817.968738] env[62875]: } to 
complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2817.976330] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52debf34-03e9-1c5f-033c-37eb34401b52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2818.096831] env[62875]: DEBUG nova.compute.manager [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2818.096884] env[62875]: DEBUG nova.compute.manager [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing instance network info cache due to event network-changed-5a51b325-2a9b-4c76-8da9-9fa0817d61e9. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2818.097087] env[62875]: DEBUG oslo_concurrency.lockutils [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2818.097234] env[62875]: DEBUG oslo_concurrency.lockutils [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2818.097396] env[62875]: DEBUG nova.network.neutron [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Refreshing network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2818.478904] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2818.479353] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Processing image c28dfddf-e555-419c-86a5-ca1247af5c36 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2818.479414] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/c28dfddf-e555-419c-86a5-ca1247af5c36.vmdk" {{(pid=62875) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2818.479599] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/c28dfddf-e555-419c-86a5-ca1247af5c36.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2818.479791] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2818.480047] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-815559d3-707a-46dc-bde8-0ec520acad70 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2818.488694] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2818.488840] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2818.489515] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1e498c0-7070-441d-a8e8-394df945122c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2818.494076] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2818.494076] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528dd841-e314-2cea-7781-9dabcbedf789" [ 2818.494076] env[62875]: _type = "Task" [ 2818.494076] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2818.500946] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528dd841-e314-2cea-7781-9dabcbedf789, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2818.792395] env[62875]: DEBUG nova.network.neutron [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updated VIF entry in instance network info cache for port 5a51b325-2a9b-4c76-8da9-9fa0817d61e9. 
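
The lock names in the surrounding lockutils entries ("[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk") are per-image, so only one worker fetches a given image into the cache while the SearchDatastore_Task existence check decides whether a fetch is needed at all. A minimal sketch of that double-checked pattern with oslo.concurrency, using a local file path as a stand-in for the datastore path:

    import os
    from oslo_concurrency import lockutils

    def get_cached_image(cache_path, fetch):
        """Fetch an image into the cache at most once across workers."""
        if os.path.exists(cache_path):        # fast path, no lock taken
            return cache_path
        # One lock per cached image path, mirroring the lock names in the log.
        with lockutils.lock(cache_path):
            if not os.path.exists(cache_path):   # re-check under the lock
                fetch(cache_path)                # only the first caller fetches
        return cache_path
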
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2818.792815] env[62875]: DEBUG nova.network.neutron [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2819.003784] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Preparing fetch location {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2819.004047] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Fetch image to [datastore2] OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052/OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052.vmdk {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2819.004236] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Downloading stream optimized image c28dfddf-e555-419c-86a5-ca1247af5c36 to [datastore2] OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052/OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052.vmdk on the data store datastore2 as vApp {{(pid=62875) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2819.004404] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Downloading image file data c28dfddf-e555-419c-86a5-ca1247af5c36 to the ESX as VM named 'OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052' {{(pid=62875) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2819.068602] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2819.068602] env[62875]: value = "resgroup-9" [ 2819.068602] env[62875]: _type = "ResourcePool" [ 2819.068602] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2819.068877] env[62875]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e0d90ce4-c8e9-476a-9718-d93019fd26b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2819.089215] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease: (returnval){ [ 2819.089215] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527b4379-c03d-dd5c-535f-009cea68e6ff" [ 2819.089215] env[62875]: _type = "HttpNfcLease" [ 2819.089215] env[62875]: } obtained for vApp import into resource pool (val){ [ 2819.089215] env[62875]: value = "resgroup-9" [ 2819.089215] env[62875]: _type = "ResourcePool" [ 2819.089215] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2819.089520] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the lease: (returnval){ [ 2819.089520] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527b4379-c03d-dd5c-535f-009cea68e6ff" [ 2819.089520] env[62875]: _type = "HttpNfcLease" [ 2819.089520] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2819.098172] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2819.098172] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527b4379-c03d-dd5c-535f-009cea68e6ff" [ 2819.098172] env[62875]: _type = "HttpNfcLease" [ 2819.098172] env[62875]: } is initializing. 
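
The HttpNfcLease obtained above starts out "initializing" and must be polled until it reports "ready" before any image bytes can move; the real driver delegates this to oslo.vmware's wait_for_lease_ready. A rough sketch of that state poll, assuming a pyVmomi-style lease object exposing .state and .error:

    import time

    def wait_for_lease_ready(lease, interval=0.5, timeout=60):
        """Poll a lease until it is 'ready'; raise on 'error' or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = lease.state            # 'initializing' -> 'ready' | 'error'
            if state == 'ready':
                return
            if state == 'error':
                raise RuntimeError(lease.error)
            time.sleep(interval)
        raise TimeoutError('lease never became ready')
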
{{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2819.295270] env[62875]: DEBUG oslo_concurrency.lockutils [req-2c954fe8-7743-430d-87b3-c2e5ecbba600 req-7bca9673-270f-4004-abe8-166e92742bad service nova] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2819.391631] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2819.391860] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2819.392044] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2819.392195] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 2819.598167] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2819.598167] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527b4379-c03d-dd5c-535f-009cea68e6ff" [ 2819.598167] env[62875]: _type = "HttpNfcLease" [ 2819.598167] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2819.598641] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2819.598641] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527b4379-c03d-dd5c-535f-009cea68e6ff" [ 2819.598641] env[62875]: _type = "HttpNfcLease" [ 2819.598641] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2819.599143] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b99870-ef34-48d6-ab10-21c085306b54 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2819.606527] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e027fd-cf69-05d0-a7b9-78ae3ef49e1d/disk-0.vmdk from lease info. 
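
Once the lease is ready, its info carries one device URL per virtual disk, and the "Found VMDK URL" line above is the result of picking the disk-0 entry. A sketch under the assumption of pyVmomi-style lease.info.deviceUrl objects (vCenter can return a wildcard host in the URL, which gets substituted with the actual ESX host):

    def find_vmdk_url(lease_info, host):
        """Return the transfer URL for the first disk device in the lease."""
        for device_url in lease_info.deviceUrl:
            if device_url.disk:                      # skip non-disk devices
                # 'https://*/nfc/...' -> 'https://<esx-host>/nfc/...'
                return device_url.url.replace('*', host)
        raise LookupError('no disk deviceUrl in lease info')
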
{{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2819.606712] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e027fd-cf69-05d0-a7b9-78ae3ef49e1d/disk-0.vmdk. {{(pid=62875) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2819.667683] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d56a4f11-fd1c-498f-883e-f3ef748c79dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2819.706501] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2820.701994] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2820.728980] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Completed reading data from the image iterator. {{(pid=62875) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2820.729276] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e027fd-cf69-05d0-a7b9-78ae3ef49e1d/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2820.730203] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536b220d-4b89-4234-b749-c55fdb6cfcd5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.736651] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e027fd-cf69-05d0-a7b9-78ae3ef49e1d/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2820.736813] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e027fd-cf69-05d0-a7b9-78ae3ef49e1d/disk-0.vmdk. 
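
The write handle then streams the 31,666,176-byte image to that URL over HTTP, ticking HttpNfcLeaseProgress along the way so vCenter keeps the lease alive. A minimal sketch of the streaming side using requests; the report_progress callback stands in for the lease progress call and is an assumption for illustration, not the oslo.vmware API:

    import requests

    CHUNK = 64 * 1024

    def upload_vmdk(url, fileobj, size, report_progress, verify=True):
        """PUT a stream-optimized VMDK, reporting percent done as we go."""
        sent = 0

        def chunks():
            nonlocal sent
            while True:
                data = fileobj.read(CHUNK)
                if not data:
                    return
                sent += len(data)
                report_progress(sent * 100 // size)  # keeps the lease alive
                yield data

        headers = {'Content-Type': 'application/x-vnd.vmware-streamVmdk',
                   'Content-Length': str(size)}
        resp = requests.put(url, data=chunks(), headers=headers, verify=verify)
        resp.raise_for_status()
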
{{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2820.737072] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9ca8ce0e-eaf1-41be-b689-0636df68c71a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.932707] env[62875]: DEBUG oslo_vmware.rw_handles [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e027fd-cf69-05d0-a7b9-78ae3ef49e1d/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2820.932949] env[62875]: INFO nova.virt.vmwareapi.images [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Downloaded image file data c28dfddf-e555-419c-86a5-ca1247af5c36 [ 2820.933763] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02ed454-8318-4a6a-9871-8626b18a289e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.949260] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3c2a009-d625-4503-b396-6bb074827b7b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.977025] env[62875]: INFO nova.virt.vmwareapi.images [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] The imported VM was unregistered [ 2820.979388] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Caching image {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2820.979653] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating directory with path [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2820.979913] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3c486fa-5d62-4b19-8ec9-9befa16c6cb1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.990080] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created directory with path [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2820.990259] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 
tempest-AttachVolumeShelveTestJSON-262331675-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052/OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052.vmdk to [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/c28dfddf-e555-419c-86a5-ca1247af5c36.vmdk. {{(pid=62875) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2820.990488] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-478b2228-fea0-4340-bf6a-4428a8487043 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.996353] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2820.996353] env[62875]: value = "task-2180820" [ 2820.996353] env[62875]: _type = "Task" [ 2820.996353] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2821.003600] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180820, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2821.506739] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180820, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2822.007771] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180820, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2822.508564] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180820, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2822.706527] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2823.008861] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180820, 'name': MoveVirtualDisk_Task} progress is 94%. 
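
Every vCenter operation in this stretch (CreateVM_Task, MoveVirtualDisk_Task, and so on) returns a Task object that the driver polls until it reaches a terminal state; the repeating "progress is N%" lines are exactly that loop, which oslo.vmware wraps in api.wait_for_task. A simplified version, assuming a pyVmomi-style task with .info.state and .info.progress:

    import time

    def wait_for_task(task, interval=0.5):
        """Poll a vCenter task until success, raising on error."""
        while True:
            info = task.info
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # 'queued' or 'running': log progress and poll again
            print(f"Task {info.key} progress is {info.progress or 0}%")
            time.sleep(interval)
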
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2823.509551] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180820, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.162265} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2823.509822] env[62875]: INFO nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052/OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052.vmdk to [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/c28dfddf-e555-419c-86a5-ca1247af5c36.vmdk. [ 2823.510013] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Cleaning up location [datastore2] OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2823.510186] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_c3b4af94-8c1b-4cc2-aaa1-ac39baf08052 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2823.510430] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01d31803-3b81-4ef4-86af-84e43bc9488b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2823.516707] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2823.516707] env[62875]: value = "task-2180821" [ 2823.516707] env[62875]: _type = "Task" [ 2823.516707] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2823.524140] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180821, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2824.026838] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03263} completed successfully. 
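
The sequence just completed, importing into a throwaway OSTACK_IMG_<uuid> directory, moving the disk into devstack-image-cache_base/<image-id>/, then deleting the staging leftovers, is the standard "stage, then move into place" trick: the cache entry only becomes visible once it is complete, so concurrent readers never see a half-written disk. A local-filesystem sketch of the same idea, with illustrative paths:

    import os
    import shutil
    import uuid

    def publish_to_cache(produce, cache_dir, image_id):
        """Build an artifact in a staging dir, then move it into the cache."""
        final = os.path.join(cache_dir, image_id)
        staging = os.path.join(cache_dir, "OSTACK_IMG_" + str(uuid.uuid4()))
        os.makedirs(staging)
        try:
            produce(staging)             # download/convert into the staging dir
            os.rename(staging, final)    # entry appears only once complete
            staging = None
        finally:
            if staging:                  # the DeleteDatastoreFile cleanup step
                shutil.rmtree(staging, ignore_errors=True)
        return final
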
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2824.027229] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2824.027352] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/c28dfddf-e555-419c-86a5-ca1247af5c36.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2824.027588] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/c28dfddf-e555-419c-86a5-ca1247af5c36.vmdk to [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2824.027892] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10563b0a-d8ca-41f2-92a1-9d8e7c3aba89 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2824.035009] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2824.035009] env[62875]: value = "task-2180822" [ 2824.035009] env[62875]: _type = "Task" [ 2824.035009] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2824.041919] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180822, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2824.546427] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180822, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2824.707295] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2825.047279] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180822, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2825.550600] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180822, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2826.049554] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180822, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2826.550078] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180822, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.151621} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2826.550348] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/c28dfddf-e555-419c-86a5-ca1247af5c36.vmdk to [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2826.551106] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9aaf66e-3ced-425c-94fb-28fded3f6384 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2826.572773] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk or device None with type streamOptimized {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2826.573114] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70d28205-9049-40d8-91e6-8d434d3c6aae {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2826.591552] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2826.591552] env[62875]: value = "task-2180823" [ 2826.591552] env[62875]: _type = "Task" [ 2826.591552] env[62875]: } to complete. 
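
"Reconfiguring VM instance ... to attach disk" corresponds to a ReconfigVM_Task whose spec adds a VirtualDisk device pointing at the freshly copied VMDK. A condensed sketch of such a spec with pyVmomi; the controller key and unit number here are placeholder assumptions, not values from the log:

    from pyVmomi import vim

    def attach_disk_spec(vmdk_path, controller_key=1000, unit_number=0):
        """Build a ReconfigVM spec that attaches an existing VMDK to a VM."""
        backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo(
            fileName=vmdk_path,          # e.g. '[datastore2] <uuid>/<uuid>.vmdk'
            diskMode='persistent')
        disk = vim.vm.device.VirtualDisk(
            backing=backing,
            controllerKey=controller_key,
            unitNumber=unit_number,
            key=-100)                    # negative key: assigned by vCenter
        change = vim.vm.device.VirtualDeviceSpec(
            operation=vim.vm.device.VirtualDeviceSpec.Operation.add,
            device=disk)
        return vim.vm.ConfigSpec(deviceChange=[change])

    # Applied via: vm.ReconfigVM_Task(spec=attach_disk_spec(path))
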
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2826.598719] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180823, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2827.101678] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180823, 'name': ReconfigVM_Task, 'duration_secs': 0.256445} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2827.102030] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2/2e1129fe-f32c-4868-bc68-a39ea14fe9d2.vmdk or device None with type streamOptimized {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2827.103376] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'encryption_secret_uuid': None, 'disk_bus': None, 'boot_index': 0, 'encryption_options': None, 'size': 0, 'device_name': '/dev/sda', 'encryption_format': None, 'encrypted': False, 'device_type': 'disk', 'image_id': 'a9637bcc-4de8-4ea1-be59-4c697becf2a7'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'delete_on_termination': False, 'attachment_id': '89c3b316-e682-45b5-9f2c-ebfe6aa0e6f1', 'disk_bus': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445036', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'name': 'volume-8e7af612-749b-4036-8ba9-d4ba902716aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '2e1129fe-f32c-4868-bc68-a39ea14fe9d2', 'attached_at': '', 'detached_at': '', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'serial': '8e7af612-749b-4036-8ba9-d4ba902716aa'}, 'mount_device': '/dev/sdb', 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62875) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2827.103584] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Volume attach. 
Driver type: vmdk {{(pid=62875) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2827.103776] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445036', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'name': 'volume-8e7af612-749b-4036-8ba9-d4ba902716aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '2e1129fe-f32c-4868-bc68-a39ea14fe9d2', 'attached_at': '', 'detached_at': '', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'serial': '8e7af612-749b-4036-8ba9-d4ba902716aa'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2827.104569] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c4d439-965f-47b4-9854-800333531827 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2827.120143] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497a843f-b1e3-4f4f-94be-f7dcdfdac3ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2827.143959] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] volume-8e7af612-749b-4036-8ba9-d4ba902716aa/volume-8e7af612-749b-4036-8ba9-d4ba902716aa.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2827.144219] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0685a8d7-28d5-4ef0-a6c7-eee83140efe9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2827.162120] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2827.162120] env[62875]: value = "task-2180824" [ 2827.162120] env[62875]: _type = "Task" [ 2827.162120] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2827.169559] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180824, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2827.671772] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180824, 'name': ReconfigVM_Task, 'duration_secs': 0.266735} completed successfully. 
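
The block device information dict logged above is what the driver walks to decide which volumes to attach and where. A small sketch of pulling the vmdk attach parameters out of one block_device_mapping entry, using only keys that appear in the log:

    def vmdk_attach_params(bdm_entry):
        """Extract what a vmdk volume attach needs from a BDM entry."""
        conn = bdm_entry['connection_info']
        assert conn['driver_volume_type'] == 'vmdk'
        data = conn['data']
        return {
            'volume_ref': data['volume'],        # shadow VM, e.g. 'vm-445036'
            'vmdk_name': data['name'],           # 'volume-<volume_id>'
            'volume_id': data['volume_id'],
            'mount_device': bdm_entry['mount_device'],  # '/dev/sdb'
            'read_only': data['access_mode'] != 'rw',
        }
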
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2827.672056] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfigured VM instance instance-00000069 to attach disk [datastore2] volume-8e7af612-749b-4036-8ba9-d4ba902716aa/volume-8e7af612-749b-4036-8ba9-d4ba902716aa.vmdk or device None with type thin {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2827.676623] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb7d5454-ab7c-4daa-b331-a4e63f771021 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2827.690986] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2827.690986] env[62875]: value = "task-2180825" [ 2827.690986] env[62875]: _type = "Task" [ 2827.690986] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2827.699423] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180825, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2828.200928] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180825, 'name': ReconfigVM_Task, 'duration_secs': 0.127603} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2828.201290] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445036', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'name': 'volume-8e7af612-749b-4036-8ba9-d4ba902716aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '2e1129fe-f32c-4868-bc68-a39ea14fe9d2', 'attached_at': '', 'detached_at': '', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'serial': '8e7af612-749b-4036-8ba9-d4ba902716aa'} {{(pid=62875) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2828.201816] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f1d8048-53ed-4291-a945-47f8b4e7966d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2828.207316] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2828.207316] env[62875]: value = "task-2180826" [ 2828.207316] env[62875]: _type = "Task" [ 2828.207316] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2828.214602] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180826, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2828.706868] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2828.707111] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2828.707186] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 2828.717235] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180826, 'name': Rename_Task, 'duration_secs': 0.138334} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
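The ReconfigVM_Task and Rename_Task entries above all follow the same oslo.vmware pattern: invoke the vSphere method, receive a Task moref, then poll its TaskInfo until it reports success. A minimal sketch of that poll loop, where get_task_info is a hypothetical stand-in for the property-collector read that oslo.vmware performs on each iteration (the real loop lives in the oslo_vmware/api.py paths quoted in the log):

import time

POLL_INTERVAL = 0.5  # seconds between polls; oslo.vmware calls this task_poll_interval


def wait_for_task(get_task_info, task_ref):
    # Poll a vSphere task until it finishes, mirroring the
    # "progress is N%." / "completed successfully." entries above.
    while True:
        info = get_task_info(task_ref)  # hypothetical TaskInfo read
        if info.state in ('queued', 'running'):
            print(f"Task: {task_ref} progress is {info.progress}%.")
        elif info.state == 'success':
            print(f"Task: {task_ref} completed successfully.")
            return info.result
        else:  # 'error'
            raise RuntimeError(str(info.error))
        time.sleep(POLL_INTERVAL)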
[ 2828.717486] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2828.717715] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b01cd89c-5980-41e6-a624-606bf08f6af7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2828.724070] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2828.724070] env[62875]: value = "task-2180827" [ 2828.724070] env[62875]: _type = "Task" [ 2828.724070] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2828.731137] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180827, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2829.213233] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2829.213609] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2829.213609] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2829.213674] env[62875]: DEBUG nova.objects.instance [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lazy-loading 'info_cache' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2829.237465] env[62875]: DEBUG oslo_vmware.api [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180827, 'name': PowerOnVM_Task, 'duration_secs': 0.404578} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2829.237766] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2829.332898] env[62875]: DEBUG nova.compute.manager [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2829.333833] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5feec3-65b2-4ffd-b04e-6f071825e949 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2829.852292] env[62875]: DEBUG oslo_concurrency.lockutils [None req-bd901ebc-0074-4a00-b4e6-27e48a1800dd tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 29.066s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2830.937891] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [{"id": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "address": "fa:16:3e:a2:9b:b9", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a51b325-2a", "ovs_interfaceid": "5a51b325-2a9b-4c76-8da9-9fa0817d61e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2831.440450] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-2e1129fe-f32c-4868-bc68-a39ea14fe9d2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2831.440666] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 
2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 2831.440854] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2831.944228] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2831.944688] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2831.944688] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2831.944880] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2831.945747] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b58a3d-bac3-4f5b-82f6-2d339996edde {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2831.953936] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94809001-1956-471b-bd50-cee752f57c43 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2831.968192] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9d8968-1e99-458c-9ec7-6ed1d8ee1af2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2831.974541] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44f5357-46c1-44af-9e26-5bf2f9a6a862 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.004763] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180876MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2832.004952] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2832.005134] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2833.029706] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2833.029998] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2833.030080] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2833.055007] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bbfbd1-7359-4557-bdfe-8494eae068c7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2833.062636] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42abc71-402c-435b-8ffc-db574922a61e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2833.091903] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7a6586-5c6c-4f4a-b205-8230d821162b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2833.098794] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0234f41-c7c1-4119-9777-7ec32889000a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2833.111376] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2833.614515] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
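The inventory dict reported to placement above carries everything needed to turn raw host resources into schedulable capacity: per resource class, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Reproducing that arithmetic with the exact values from the log entry:

# Values copied from the set_inventory_for_provider entry above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity:g}")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- which is how a host that
# reports "Total usable vcpus: 48" can still accept far more instance
# vCPUs at allocation_ratio 4.0.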
[ 2834.119488] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2834.119918] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2843.116113] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2865.724015] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2865.724426] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2866.227933] env[62875]: INFO nova.compute.manager [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Detaching volume 8e7af612-749b-4036-8ba9-d4ba902716aa [ 2866.258234] env[62875]: INFO nova.virt.block_device [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Attempting to driver detach volume 8e7af612-749b-4036-8ba9-d4ba902716aa from mountpoint /dev/sdb [ 2866.258477] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Volume detach. 
Driver type: vmdk {{(pid=62875) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2866.258664] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445036', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'name': 'volume-8e7af612-749b-4036-8ba9-d4ba902716aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '2e1129fe-f32c-4868-bc68-a39ea14fe9d2', 'attached_at': '', 'detached_at': '', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'serial': '8e7af612-749b-4036-8ba9-d4ba902716aa'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2866.259585] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be6e7cc-3353-4d56-9fe6-6131f0c46fe8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.280143] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5b313b-de45-49b2-9f61-0009c876373e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.286625] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a614703-4450-4064-aee4-0213a95b020c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.305949] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1f5092-639d-4d1d-84b9-c02ca75184b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.320519] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] The volume has not been displaced from its original location: [datastore2] volume-8e7af612-749b-4036-8ba9-d4ba902716aa/volume-8e7af612-749b-4036-8ba9-d4ba902716aa.vmdk. No consolidation needed. 
{{(pid=62875) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2866.325559] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2866.325801] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c167406-9bbd-4251-91c2-f0b04ffa96e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.342536] env[62875]: DEBUG oslo_vmware.api [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2866.342536] env[62875]: value = "task-2180828" [ 2866.342536] env[62875]: _type = "Task" [ 2866.342536] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2866.349574] env[62875]: DEBUG oslo_vmware.api [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180828, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2866.852117] env[62875]: DEBUG oslo_vmware.api [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180828, 'name': ReconfigVM_Task, 'duration_secs': 0.216144} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2866.852443] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=62875) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2866.857055] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf5a89a5-4518-4110-9bc8-a2e24b530813 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.872402] env[62875]: DEBUG oslo_vmware.api [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2866.872402] env[62875]: value = "task-2180829" [ 2866.872402] env[62875]: _type = "Task" [ 2866.872402] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2866.880182] env[62875]: DEBUG oslo_vmware.api [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180829, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2867.382972] env[62875]: DEBUG oslo_vmware.api [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180829, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2867.884233] env[62875]: DEBUG oslo_vmware.api [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180829, 'name': ReconfigVM_Task, 'duration_secs': 0.758179} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2867.884641] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-445036', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'name': 'volume-8e7af612-749b-4036-8ba9-d4ba902716aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '2e1129fe-f32c-4868-bc68-a39ea14fe9d2', 'attached_at': '', 'detached_at': '', 'volume_id': '8e7af612-749b-4036-8ba9-d4ba902716aa', 'serial': '8e7af612-749b-4036-8ba9-d4ba902716aa'} {{(pid=62875) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2868.423260] env[62875]: DEBUG nova.objects.instance [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'flavor' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2869.432423] env[62875]: DEBUG oslo_concurrency.lockutils [None req-4bc301a9-10aa-44ba-93d9-6e7791331c58 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.708s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
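The detach traced above is two ReconfigVM_Task calls: one removes the disk from the instance VM's device list, the next tidies the volume's own config. The crucial detail is the device-change spec: operation 'remove' with no fileOperation, so the backing volume-....vmdk survives on the datastore for Cinder. A sketch of that spec as a plain dict; a real client builds typed VirtualDeviceConfigSpec objects, this only mirrors the structure:

def remove_disk_spec(disk_device):
    # Detach, don't delete: omitting 'fileOperation' leaves the backing
    # VMDK in place; fileOperation='destroy' would erase the Cinder volume.
    return {
        'deviceChange': [{
            'operation': 'remove',
            'device': disk_device,  # the VirtualDisk found via the property collector
        }]
    }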
[ 2869.951744] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2869.952036] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2869.952271] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2869.952463] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2869.952637] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2869.954816] env[62875]: INFO nova.compute.manager [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Terminating instance [ 2870.458838] env[62875]: DEBUG nova.compute.manager [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Start destroying the instance on the hypervisor. 
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2870.459231] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2870.460030] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947a16f0-c9fd-4788-b40e-43361f8cec73 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.467765] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2870.467994] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5ab5b7f-798c-4d37-a384-75c55423711e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.474015] env[62875]: DEBUG oslo_vmware.api [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2870.474015] env[62875]: value = "task-2180830" [ 2870.474015] env[62875]: _type = "Task" [ 2870.474015] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2870.482221] env[62875]: DEBUG oslo_vmware.api [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180830, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2870.983971] env[62875]: DEBUG oslo_vmware.api [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180830, 'name': PowerOffVM_Task, 'duration_secs': 0.183366} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2870.984271] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2870.984443] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2870.984691] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab5334d5-c747-4cb9-9f0b-c19ce67ddbc1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2871.188476] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2871.188710] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2871.188895] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleting the datastore file [datastore2] 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2871.189212] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cd61d8d-83e0-4504-a8ec-bde57df7f45b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2871.195537] env[62875]: DEBUG oslo_vmware.api [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2871.195537] env[62875]: value = "task-2180832" [ 2871.195537] env[62875]: _type = "Task" [ 2871.195537] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2871.203958] env[62875]: DEBUG oslo_vmware.api [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180832, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2871.705402] env[62875]: DEBUG oslo_vmware.api [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126679} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2871.705670] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2871.705829] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2871.706014] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2871.706193] env[62875]: INFO nova.compute.manager [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2871.706497] env[62875]: DEBUG oslo.service.loopingcall [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2871.706694] env[62875]: DEBUG nova.compute.manager [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2871.706789] env[62875]: DEBUG nova.network.neutron [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2872.162434] env[62875]: DEBUG nova.compute.manager [req-5f44c07b-f473-4c8b-a236-80d54f4c1610 req-ecc890fe-fbf3-480f-ba91-f138741077e5 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Received event network-vif-deleted-5a51b325-2a9b-4c76-8da9-9fa0817d61e9 {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2872.162631] env[62875]: INFO nova.compute.manager [req-5f44c07b-f473-4c8b-a236-80d54f4c1610 req-ecc890fe-fbf3-480f-ba91-f138741077e5 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Neutron deleted interface 5a51b325-2a9b-4c76-8da9-9fa0817d61e9; detaching it from the instance and deleting it from the info cache [ 2872.162799] env[62875]: DEBUG nova.network.neutron [req-5f44c07b-f473-4c8b-a236-80d54f4c1610 req-ecc890fe-fbf3-480f-ba91-f138741077e5 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2872.646810] env[62875]: DEBUG nova.network.neutron [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2872.664979] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25081859-9fa1-45ea-abed-85bec5add688 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2872.677224] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fb8a9f-08eb-49ef-8d13-1895ebf9d645 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2872.699055] env[62875]: DEBUG nova.compute.manager [req-5f44c07b-f473-4c8b-a236-80d54f4c1610 req-ecc890fe-fbf3-480f-ba91-f138741077e5 service nova] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Detach interface failed, port_id=5a51b325-2a9b-4c76-8da9-9fa0817d61e9, reason: Instance 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 2873.150655] env[62875]: INFO nova.compute.manager [-] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Took 1.44 seconds to deallocate network for instance. 
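The termination traced above follows a strict order: power off (a vSphere task), UnregisterVM (a synchronous call, which is why the log shows no task id for it), delete the instance directory from the datastore (a task again), and only then hand the ports back to Neutron, with retries driven by the looping call. A hedged sketch of that sequence; session, its helpers, and network_api here are hypothetical stand-ins for the Nova/oslo.vmware plumbing, not their real signatures:

def destroy_instance(session, network_api, context, instance, vm_ref, ds_path):
    # 1. Power off -- returns a Task that must be awaited.
    session.wait_for_task(session.invoke('PowerOffVM_Task', vm_ref))
    # 2. Unregister the VM from vCenter; synchronous, no task to poll.
    session.invoke('UnregisterVM', vm_ref)
    # 3. Delete the on-datastore directory, e.g. [datastore2] <instance-uuid>/.
    session.wait_for_task(session.invoke('DeleteDatastoreFile_Task', ds_path))
    # 4. Finally release networking so the ports and info cache go away.
    network_api.deallocate_for_instance(context, instance)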
[ 2873.657723] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2873.658015] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2873.658261] env[62875]: DEBUG nova.objects.instance [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'resources' on Instance uuid 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2874.192567] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aded401-10fd-47df-9edc-a771f38fb500 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.199921] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc47ae3-8208-4b13-af2d-fe74cb41273c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.230402] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ca1508-6955-464f-a24c-4d427f386325 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.237106] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e1cf4e-6c44-40fb-92ae-175096bfd3cb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.249890] env[62875]: DEBUG nova.compute.provider_tree [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2874.753453] env[62875]: DEBUG nova.scheduler.client.report [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2875.258930] env[62875]: DEBUG oslo_concurrency.lockutils [None 
req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2875.277744] env[62875]: INFO nova.scheduler.client.report [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted allocations for instance 2e1129fe-f32c-4868-bc68-a39ea14fe9d2 [ 2875.785777] env[62875]: DEBUG oslo_concurrency.lockutils [None req-0c060cb7-cd19-48b7-8a12-07511dd37af5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "2e1129fe-f32c-4868-bc68-a39ea14fe9d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.834s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2877.706133] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2877.861448] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2877.861705] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2878.364744] env[62875]: DEBUG nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Starting instance... {{(pid=62875) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2878.706065] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2878.706385] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2878.706743] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
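The "Running periodic task ComputeManager._..." entries interleaved through this run come from oslo.service's PeriodicTasks machinery: methods decorated with @periodic_task.periodic_task are collected at class-definition time and invoked by run_periodic_tasks() on a timer. A minimal sketch using the real decorator; the option registration and spacing value are illustrative, not Nova's actual configuration:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])


class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)  # illustrative interval
    def _poll_rebooting_instances(self, context):
        pass  # the real task looks for reboots stuck past their timeout

    @periodic_task.periodic_task
    def _reclaim_queued_deletes(self, context):
        if CONF.reclaim_instance_interval <= 0:
            return  # mirrors "CONF.reclaim_instance_interval <= 0, skipping..."


# A service loop then calls, on each tick:
# manager.run_periodic_tasks(context, raise_on_error=False)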
[ 2878.884675] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2878.884931] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2878.886475] env[62875]: INFO nova.compute.claims [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2879.706747] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2879.924771] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69321bcb-6c75-4308-a59b-b6e4fde36041 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2879.932436] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320fc71b-9536-4679-969f-d73b379cea68 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2879.962133] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2f9ae4-e8ff-4a04-9ede-9842f2a2c1b4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2879.968862] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1a2481-af81-40fb-9f74-4e405fe0f128 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2879.981562] env[62875]: DEBUG nova.compute.provider_tree [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2880.486037] env[62875]: DEBUG nova.scheduler.client.report [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2880.989859] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.105s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2880.990423] env[62875]: DEBUG nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Start building networks asynchronously for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2881.495321] env[62875]: DEBUG nova.compute.utils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Using /dev/sd instead of None {{(pid=62875) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2881.496728] env[62875]: DEBUG nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Allocating IP information in the background. {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2881.496899] env[62875]: DEBUG nova.network.neutron [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] allocate_for_instance() {{(pid=62875) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2881.558040] env[62875]: DEBUG nova.policy [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e57a483ae3d479e8fb30feb3e0b8310', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccc96aead000465a9613e6bb73d31721', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62875) authorize /opt/stack/nova/nova/policy.py:192}} [ 2881.702119] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2881.808745] env[62875]: DEBUG nova.network.neutron [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Successfully created port: f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 2882.000387] env[62875]: DEBUG nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Start building block device mappings for instance. {{(pid=62875) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2883.011364] env[62875]: DEBUG nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Start spawning the instance on the hypervisor. {{(pid=62875) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2883.035742] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-25T04:48:20Z,direct_url=,disk_format='vmdk',id=a9637bcc-4de8-4ea1-be59-4c697becf2a7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='095aa2361c8c47a1b4891b36019a4780',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-25T04:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2883.035994] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2883.036169] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2883.036352] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2883.036500] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2883.036646] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2883.036846] 
env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2883.037010] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2883.037210] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2883.037376] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2883.037545] env[62875]: DEBUG nova.virt.hardware [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2883.038448] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cb913b-854e-49e7-80bf-50626aa49172 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2883.046129] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1aad3e-0e22-4a21-9ca0-db3f398d0110 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2884.706485] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2884.706485] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2888.255856] env[62875]: DEBUG nova.compute.manager [req-46243c6d-443f-4d4f-bb3a-2527595829f4 req-29769821-b6d9-4472-9669-038952657bab service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-vif-plugged-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2888.256101] env[62875]: DEBUG oslo_concurrency.lockutils [req-46243c6d-443f-4d4f-bb3a-2527595829f4 req-29769821-b6d9-4472-9669-038952657bab service nova] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2888.256262] env[62875]: DEBUG oslo_concurrency.lockutils [req-46243c6d-443f-4d4f-bb3a-2527595829f4 req-29769821-b6d9-4472-9669-038952657bab service nova] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2888.256410] env[62875]: DEBUG oslo_concurrency.lockutils [req-46243c6d-443f-4d4f-bb3a-2527595829f4 req-29769821-b6d9-4472-9669-038952657bab service nova] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2888.256566] env[62875]: DEBUG nova.compute.manager [req-46243c6d-443f-4d4f-bb3a-2527595829f4 req-29769821-b6d9-4472-9669-038952657bab service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] No waiting events found dispatching network-vif-plugged-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2888.256735] env[62875]: WARNING nova.compute.manager [req-46243c6d-443f-4d4f-bb3a-2527595829f4 req-29769821-b6d9-4472-9669-038952657bab service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received unexpected event network-vif-plugged-f4db9102-d2d3-403c-bfd9-37d1942d463b for instance with vm_state building and task_state spawning. [ 2888.339321] env[62875]: DEBUG nova.network.neutron [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Successfully updated port: f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2888.706247] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2888.706406] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 2888.706536] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 2888.842392] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2888.842562] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" 
{{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2888.842680] env[62875]: DEBUG nova.network.neutron [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2889.209475] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Skipping network cache update for instance because it is Building. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 2889.209672] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Didn't find any instances for network info cache update. {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 2889.209836] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2889.371485] env[62875]: DEBUG nova.network.neutron [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Instance cache missing network info. {{(pid=62875) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2889.491270] env[62875]: DEBUG nova.network.neutron [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2889.713049] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" 
{{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2889.713291] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2889.713481] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2889.713652] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2889.714896] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7657243d-5e16-455c-a1f8-c0dea0584935 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2889.722888] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd448b2-af6a-423b-bcfc-ac933f9b4f89 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2889.737123] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7690ed4b-bdee-4731-be0f-47c0820cf9eb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2889.743191] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092f799f-b928-4bee-9cba-585cbe7dd6fc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2890.438466] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2890.438770] env[62875]: DEBUG nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Instance network_info: |[{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62875) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2890.440160] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181076MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2890.440320] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2890.440512] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2890.445017] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:d9:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4db9102-d2d3-403c-bfd9-37d1942d463b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2890.449099] env[62875]: DEBUG oslo.service.loopingcall [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2890.450105] env[62875]: DEBUG nova.compute.manager [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2890.450289] env[62875]: DEBUG nova.compute.manager [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing instance network info cache due to event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2890.450487] env[62875]: DEBUG oslo_concurrency.lockutils [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2890.450622] env[62875]: DEBUG oslo_concurrency.lockutils [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2890.450769] env[62875]: DEBUG nova.network.neutron [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2890.451999] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2890.452346] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d762a771-2261-4406-9a34-9b3b2e5931f9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2890.473394] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2890.473394] env[62875]: value = "task-2180833" [ 2890.473394] env[62875]: _type = "Task" [ 2890.473394] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2890.481021] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180833, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2890.983514] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180833, 'name': CreateVM_Task, 'duration_secs': 0.360654} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2890.985629] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2890.986285] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2890.986450] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2890.986781] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2890.987303] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9cd3e05-a78e-424b-aad1-fe16efed0eda {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2890.991415] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2890.991415] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52757b31-d229-d0a0-2866-3b26651064ec" [ 2890.991415] env[62875]: _type = "Task" [ 2890.991415] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2890.998535] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52757b31-d229-d0a0-2866-3b26651064ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2891.313067] env[62875]: DEBUG nova.network.neutron [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updated VIF entry in instance network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2891.313429] env[62875]: DEBUG nova.network.neutron [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2891.475585] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2891.475864] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2891.475943] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2891.503298] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52757b31-d229-d0a0-2866-3b26651064ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010073} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2891.504375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2891.504595] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Processing image a9637bcc-4de8-4ea1-be59-4c697becf2a7 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2891.504830] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2891.504972] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2891.505165] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2891.505887] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef78d4f5-33d6-4cdc-aa2a-0401892c6097 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2891.508243] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-589ee526-9140-4b65-b129-a47bcea5ba70 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2891.514888] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0047898e-1089-46d0-a5e0-abb0cb2b895a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2891.518526] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2891.518703] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2891.519682] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a01f663-2538-444f-bda5-abe015dd4707 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2891.547374] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0ad11e-356f-4327-9fbb-7394c63f79fb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2891.549925] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2891.549925] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52785bb7-1c83-7608-3054-8e4b925d3592" [ 2891.549925] env[62875]: _type = "Task" [ 2891.549925] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2891.556066] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043c4652-1108-428c-88a2-faf5977e9eef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2891.562613] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52785bb7-1c83-7608-3054-8e4b925d3592, 'name': SearchDatastore_Task, 'duration_secs': 0.008228} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2891.563566] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4a955cc-0f9e-4428-b718-c9b752b6d450 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2891.572980] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2891.576709] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2891.576709] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e108d8-8f4a-805c-78fc-c3d9d06e7501" [ 2891.576709] env[62875]: _type = "Task" [ 2891.576709] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2891.583859] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e108d8-8f4a-805c-78fc-c3d9d06e7501, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2891.816042] env[62875]: DEBUG oslo_concurrency.lockutils [req-e5c7a8cc-fc09-42c7-81d9-7ee9a99c1c9c req-35fbb22d-9b45-42d5-a859-1ef0b6e59a40 service nova] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2892.076146] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2892.088441] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e108d8-8f4a-805c-78fc-c3d9d06e7501, 'name': SearchDatastore_Task, 'duration_secs': 0.009395} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2892.089246] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2892.089488] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2892.089735] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d0a3e5a-32eb-4240-aae9-ddf0a05c64f7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2892.096441] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2892.096441] env[62875]: value = "task-2180834" [ 2892.096441] env[62875]: _type = "Task" [ 2892.096441] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2892.104937] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180834, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2892.583757] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2892.584196] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.143s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2892.606523] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180834, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421708} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2892.606782] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7/a9637bcc-4de8-4ea1-be59-4c697becf2a7.vmdk to [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2892.606992] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Extending root virtual disk to 1048576 {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2892.607573] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-461dd155-dd12-45c7-8a5b-2de868032a59 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2892.613911] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2892.613911] env[62875]: value = "task-2180835" [ 2892.613911] env[62875]: _type = "Task" [ 2892.613911] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2892.622107] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180835, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2893.123929] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180835, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0587} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2893.124144] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Extended root virtual disk {{(pid=62875) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2893.124873] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4a2ec3-f0c6-4a13-81ad-6189f7adad67 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.146136] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2893.146376] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a15cdeaf-065e-4ef4-beb5-64adde52b384 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.164773] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2893.164773] env[62875]: value = "task-2180836" [ 2893.164773] env[62875]: _type = "Task" [ 2893.164773] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2893.171733] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180836, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2893.674130] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180836, 'name': ReconfigVM_Task, 'duration_secs': 0.263885} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2893.674487] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk or device None with type sparse {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2893.675030] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5d63a0a-2ae6-43b0-ae8a-7ded8d551547 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.680982] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2893.680982] env[62875]: value = "task-2180837" [ 2893.680982] env[62875]: _type = "Task" [ 2893.680982] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2893.688500] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180837, 'name': Rename_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2894.194278] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180837, 'name': Rename_Task, 'duration_secs': 0.137017} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2894.194641] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2894.194995] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51969989-6bf6-4716-9cc1-b8483e303719 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2894.202654] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2894.202654] env[62875]: value = "task-2180838" [ 2894.202654] env[62875]: _type = "Task" [ 2894.202654] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2894.213823] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180838, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2894.712592] env[62875]: DEBUG oslo_vmware.api [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180838, 'name': PowerOnVM_Task, 'duration_secs': 0.435035} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2894.712966] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2894.713061] env[62875]: INFO nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Took 11.70 seconds to spawn the instance on the hypervisor. [ 2894.713245] env[62875]: DEBUG nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2894.714071] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9195a64e-0d0e-4743-8812-ab782ab8f351 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2895.230852] env[62875]: INFO nova.compute.manager [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Took 16.36 seconds to build instance. [ 2895.648969] env[62875]: DEBUG nova.compute.manager [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2895.648969] env[62875]: DEBUG nova.compute.manager [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing instance network info cache due to event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b. 
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2895.649098] env[62875]: DEBUG oslo_concurrency.lockutils [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2895.649674] env[62875]: DEBUG oslo_concurrency.lockutils [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2895.649674] env[62875]: DEBUG nova.network.neutron [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2895.732142] env[62875]: DEBUG oslo_concurrency.lockutils [None req-db8c5317-7598-4808-9774-dcfcc0401fe5 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.870s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2896.362169] env[62875]: DEBUG nova.network.neutron [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updated VIF entry in instance network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b.
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2896.362564] env[62875]: DEBUG nova.network.neutron [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2896.865360] env[62875]: DEBUG oslo_concurrency.lockutils [req-630f0fb4-e143-4513-beac-d61c8edbf9ec req-84b4943a-440d-4809-92f4-b7e998f27e9b service nova] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2906.706772] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_image_cache_manager_pass {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2906.707107] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.register_storage_use.<locals>.do_register_storage_use" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2906.707910] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.register_storage_use.<locals>.do_register_storage_use" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2906.708052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.register_storage_use.<locals>.do_register_storage_use" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2906.708119] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "storage-registry-lock" by
"nova.virt.storage_users.get_storage_users..do_get_storage_users" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2906.708381] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2906.708638] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2907.213259] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bec9963-58d6-4ee4-a120-cba796f9a39b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2907.221980] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e8eb2d-622f-4e1a-a500-46a286468a9a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2907.738605] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-117939b7-0fb2-452c-9032-cdea569235a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2907.744564] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2907.744564] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526ef1d8-d742-e78d-3cd8-cf1e6bb0f0fe" [ 2907.744564] env[62875]: _type = "Task" [ 2907.744564] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2907.751895] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526ef1d8-d742-e78d-3cd8-cf1e6bb0f0fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2908.259566] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526ef1d8-d742-e78d-3cd8-cf1e6bb0f0fe, 'name': SearchDatastore_Task, 'duration_secs': 0.022513} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2908.259980] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/1e103813-659c-4671-83d9-1ab7dfdca2a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2908.260127] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/1e103813-659c-4671-83d9-1ab7dfdca2a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2908.260499] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1e103813-659c-4671-83d9-1ab7dfdca2a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2908.260786] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d1019c3-49e8-4247-9f1e-9d9c50bfe1d0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.265185] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2908.265185] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522a08cb-b400-2abe-b4c7-51427e347e3a" [ 2908.265185] env[62875]: _type = "Task" [ 2908.265185] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2908.272556] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522a08cb-b400-2abe-b4c7-51427e347e3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2908.775782] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522a08cb-b400-2abe-b4c7-51427e347e3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010359} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2908.776778] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/1e103813-659c-4671-83d9-1ab7dfdca2a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2908.776941] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2908.777060] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2908.777364] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2908.777626] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5251ac86-c274-4c89-b5b4-e4756ddafbb7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.781825] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2908.781825] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529e70db-0e0d-6dbe-28aa-1ce268f5b330" [ 2908.781825] env[62875]: _type = "Task" [ 2908.781825] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2908.789872] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529e70db-0e0d-6dbe-28aa-1ce268f5b330, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2909.292282] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529e70db-0e0d-6dbe-28aa-1ce268f5b330, 'name': SearchDatastore_Task, 'duration_secs': 0.010475} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2909.292623] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2909.292850] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/85313986-fa3b-4ef0-b642-386ad69296c4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2909.292969] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/85313986-fa3b-4ef0-b642-386ad69296c4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2909.293998] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/85313986-fa3b-4ef0-b642-386ad69296c4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2909.294269] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aef7ebdc-1372-48e1-b319-3a2cf205d849 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2909.299052] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2909.299052] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52107009-c055-cfde-8513-7bededfdde72" [ 2909.299052] env[62875]: _type = "Task" [ 2909.299052] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2909.306056] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52107009-c055-cfde-8513-7bededfdde72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2909.809239] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52107009-c055-cfde-8513-7bededfdde72, 'name': SearchDatastore_Task, 'duration_secs': 0.009444} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2909.809665] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore1] devstack-image-cache_base/85313986-fa3b-4ef0-b642-386ad69296c4 is no longer used. Deleting! 
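The records above are one iteration of the _run_image_cache_manager_pass periodic task: for each folder under devstack-image-cache_base the manager takes a name-scoped lock, issues HostDatastoreBrowser.SearchDatastore_Task to check the folder, and removes folders whose images no longer back any instance. A minimal sketch of the delete step, modelled on nova.virt.vmwareapi.ds_util.file_delete but written against oslo.vmware's public session API (the session, datacenter reference, and path arguments are illustrative placeholders, not values taken from this log):

def delete_cached_image(session, dc_ref, ds_path):
    # FileManager owns datastore file operations; DeleteDatastoreFile_Task
    # is the call logged as "Invoking FileManager.DeleteDatastoreFile_Task".
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=ds_path,      # "[datastore1] devstack-image-cache_base/<image-id>"
                              datacenter=dc_ref)
    # Blocks until vCenter reports success or error; the polling behind
    # this produces the "Task: {'id': task-...} progress is 0%" records.
    session.wait_for_task(task)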
[ 2909.809750] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore1] devstack-image-cache_base/85313986-fa3b-4ef0-b642-386ad69296c4 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2909.809944] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d8489c4-0a4d-4c96-9ef0-cf582fd7a11e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2909.816568] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2909.816568] env[62875]: value = "task-2180839" [ 2909.816568] env[62875]: _type = "Task" [ 2909.816568] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2909.823670] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180839, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2910.326283] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106334} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2910.326517] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2910.326693] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/85313986-fa3b-4ef0-b642-386ad69296c4" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2910.326927] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2910.327056] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2910.327375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2910.327629] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cf271f5-ff82-4aa6-9d90-3d7b6bbc879c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2910.331634] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2910.331634] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5270288e-9d49-fe92-c4b9-79c76e6bbbf8" [ 2910.331634] env[62875]: _type = "Task" [ 2910.331634] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2910.338445] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5270288e-9d49-fe92-c4b9-79c76e6bbbf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2910.842503] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5270288e-9d49-fe92-c4b9-79c76e6bbbf8, 'name': SearchDatastore_Task, 'duration_secs': 0.010076} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2910.842887] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2910.843049] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2910.843169] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2910.843475] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2910.843728] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4394988c-7c18-4b6b-8152-3c62a8e5082b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2910.847914] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2910.847914] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fe7ecb-7843-fedf-c9fd-697dde86808f" [ 2910.847914] env[62875]: _type = "Task" [ 2910.847914] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2910.855074] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fe7ecb-7843-fedf-c9fd-697dde86808f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2911.357716] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fe7ecb-7843-fedf-c9fd-697dde86808f, 'name': SearchDatastore_Task, 'duration_secs': 0.01041} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2911.358057] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2911.358285] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2911.358404] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2911.358704] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2911.358970] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69aa8fe2-c776-4358-9f58-0b7b81724b44 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2911.363288] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2911.363288] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5227e122-4403-d98a-9088-4bce8f16ee76" [ 2911.363288] env[62875]: _type = "Task" [ 2911.363288] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2911.370537] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5227e122-4403-d98a-9088-4bce8f16ee76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2911.873803] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5227e122-4403-d98a-9088-4bce8f16ee76, 'name': SearchDatastore_Task, 'duration_secs': 0.010214} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2911.874214] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2911.874366] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/5fab30d6-e71c-4f86-b828-5b81dacfd941" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2911.874488] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/5fab30d6-e71c-4f86-b828-5b81dacfd941" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2911.874781] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5fab30d6-e71c-4f86-b828-5b81dacfd941" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2911.875356] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e65ec3-73f0-41e9-b737-512701cb7f9a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2911.879177] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2911.879177] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb8ff5-8e43-59af-c34d-f5ef9ece5680" [ 2911.879177] env[62875]: _type = "Task" [ 2911.879177] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2911.886124] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb8ff5-8e43-59af-c34d-f5ef9ece5680, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2912.390647] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb8ff5-8e43-59af-c34d-f5ef9ece5680, 'name': SearchDatastore_Task, 'duration_secs': 0.009801} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2912.390984] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/5fab30d6-e71c-4f86-b828-5b81dacfd941" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2912.391225] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/5c925878-5e35-4007-8dde-766129fe0c98" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2912.391349] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/5c925878-5e35-4007-8dde-766129fe0c98" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2912.391668] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5c925878-5e35-4007-8dde-766129fe0c98" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2912.391928] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98b2bb27-a237-4b6d-871d-6269be3888d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2912.396202] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2912.396202] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dbd3c3-df0b-ed30-15be-9d2bda7c8377" [ 2912.396202] env[62875]: _type = "Task" [ 2912.396202] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2912.403540] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dbd3c3-df0b-ed30-15be-9d2bda7c8377, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2912.906654] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dbd3c3-df0b-ed30-15be-9d2bda7c8377, 'name': SearchDatastore_Task, 'duration_secs': 0.009856} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2912.907034] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/5c925878-5e35-4007-8dde-766129fe0c98" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2912.907241] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2912.907363] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2912.907679] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2912.907939] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8596b8d-23c2-41ef-9081-145ec69ec2c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2912.912070] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2912.912070] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52785266-9738-ed7d-8a5f-dde1eee9b765" [ 2912.912070] env[62875]: _type = "Task" [ 2912.912070] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2912.919799] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52785266-9738-ed7d-8a5f-dde1eee9b765, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2913.423241] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52785266-9738-ed7d-8a5f-dde1eee9b765, 'name': SearchDatastore_Task, 'duration_secs': 0.010252} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2913.423541] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore1] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d is no longer used. Deleting! 
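Each "Waiting for the task ... to complete" block followed by "progress is 0%" and "completed successfully" is oslo.vmware polling a vCenter task. The real implementation drives the poll from an oslo.service FixedIntervalLoopingCall; the following is a simplified sketch of the same control flow, with retry and error handling reduced to the bare minimum (poll_fn and the RuntimeError are placeholders for the library's property-collector call and typed exceptions):

import time

def wait_for_task(poll_fn, interval=0.5):
    # Poll a vCenter task until it leaves the queued/running states.
    while True:
        info = poll_fn()                    # one property-collector round trip
        if info.state == 'success':
            return info.result              # logged as "completed successfully"
        if info.state == 'error':
            raise RuntimeError(info.error)  # oslo.vmware raises a typed exception here
        time.sleep(interval)                # "... progress is N%" lands between polls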
[ 2913.423682] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2913.423926] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a39b81f-4eae-4c62-a0f6-fe97125e9a30 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2913.430165] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2913.430165] env[62875]: value = "task-2180840" [ 2913.430165] env[62875]: _type = "Task" [ 2913.430165] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2913.437687] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180840, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2913.939675] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102195} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2913.940100] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2913.940100] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2913.940292] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2913.940408] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2913.940762] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2913.941049] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95ff17be-df62-43d2-9c05-7b81f3e0c40f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2913.945308] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2913.945308] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523ea349-cab1-a208-beef-5c02026c6e07" [ 2913.945308] env[62875]: _type = "Task" [ 2913.945308] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2913.952582] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523ea349-cab1-a208-beef-5c02026c6e07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2914.455814] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523ea349-cab1-a208-beef-5c02026c6e07, 'name': SearchDatastore_Task, 'duration_secs': 0.010021} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2914.456131] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore1] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17 is no longer used. Deleting! [ 2914.456272] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2914.456522] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73ae7789-f9eb-40d8-87ea-8ddc411bf067 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.462668] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2914.462668] env[62875]: value = "task-2180841" [ 2914.462668] env[62875]: _type = "Task" [ 2914.462668] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2914.469756] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2914.972974] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096129} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2914.973385] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2914.973385] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2914.973609] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/f5bd38ff-4c28-47fb-a099-c0e9c58b5bb6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2914.973702] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/f5bd38ff-4c28-47fb-a099-c0e9c58b5bb6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2914.974042] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f5bd38ff-4c28-47fb-a099-c0e9c58b5bb6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2914.974318] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f5988e8-b4a0-49d9-ac0e-7b608e0b759f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.979144] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2914.979144] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52000faa-4428-780c-98e3-a82e65a49afe" [ 2914.979144] env[62875]: _type = "Task" [ 2914.979144] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2914.986274] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52000faa-4428-780c-98e3-a82e65a49afe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2915.489348] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52000faa-4428-780c-98e3-a82e65a49afe, 'name': SearchDatastore_Task, 'duration_secs': 0.009682} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2915.489701] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore1] devstack-image-cache_base/f5bd38ff-4c28-47fb-a099-c0e9c58b5bb6 is no longer used. Deleting! 
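Not every unused image is removed on the spot: a few records below, image f9addc8a-f291-4e7a-a1c5-93144e13b3ee only receives a ts-2025-01-25-05-29-42 directory and a "Pending deletion!" message, while the images deleted in this pass would have been stamped on an earlier one. A hedged sketch of that aging decision, loosely following nova.virt.vmwareapi.imagecache (names are approximate; max_age_seconds stands in for the configurable minimum unused age):

from datetime import datetime, timedelta

TS_PREFIX = 'ts-'
TS_FORMAT = '%Y-%m-%d-%H-%M-%S'

def age_cached_image(ts_folder_name, max_age_seconds, now=None):
    # Returns a marker name to create, 'delete' once the marker has aged
    # out, or None while the grace period is still running.
    now = now or datetime.utcnow()
    if ts_folder_name is None:
        # First pass that finds the image unused: stamp it, logging
        # "... is no longer used by this node. Pending deletion!"
        return TS_PREFIX + now.strftime(TS_FORMAT)
    stamped = datetime.strptime(ts_folder_name[len(TS_PREFIX):], TS_FORMAT)
    if now - stamped > timedelta(seconds=max_age_seconds):
        return 'delete'  # "... is no longer used. Deleting!"
    return None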
[ 2915.489848] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f5bd38ff-4c28-47fb-a099-c0e9c58b5bb6 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2915.490119] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c37f524-f818-48a5-9dd9-1eece10b2b30 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2915.495544] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2915.495544] env[62875]: value = "task-2180842" [ 2915.495544] env[62875]: _type = "Task" [ 2915.495544] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2915.502675] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2916.004636] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102182} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2916.005069] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2916.005069] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/f5bd38ff-4c28-47fb-a099-c0e9c58b5bb6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2916.005263] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2916.005381] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2916.005693] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2916.005952] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-108878d1-2bc9-4ea5-9969-259492beacef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2916.010156] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2916.010156] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48ce8-02e6-4741-7d3b-8218005ad40e" [ 2916.010156] env[62875]: _type = "Task" [ 2916.010156] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2916.017210] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48ce8-02e6-4741-7d3b-8218005ad40e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2916.520741] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48ce8-02e6-4741-7d3b-8218005ad40e, 'name': SearchDatastore_Task, 'duration_secs': 0.009137} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2916.521084] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2916.521309] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2916.521432] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2916.521746] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2916.522015] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8cad970-2db8-4e7b-92f5-85f5c9232727 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2916.526174] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2916.526174] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52613646-9b7f-8bf7-eb97-04a2a8cae330" [ 2916.526174] env[62875]: _type = "Task" [ 2916.526174] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2916.533398] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52613646-9b7f-8bf7-eb97-04a2a8cae330, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2917.037929] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52613646-9b7f-8bf7-eb97-04a2a8cae330, 'name': SearchDatastore_Task, 'duration_secs': 0.009484} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2917.038303] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2917.038480] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2917.038599] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2917.038909] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2917.039184] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0977aa5-f1f3-4068-b9f1-0e5307103cb0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2917.043267] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2917.043267] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b0a63-1449-058f-cbc9-f696446313e2" [ 2917.043267] env[62875]: _type = "Task" [ 2917.043267] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2917.050338] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b0a63-1449-058f-cbc9-f696446313e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2917.554092] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520b0a63-1449-058f-cbc9-f696446313e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009516} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2917.554355] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore1] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee/ts-2025-01-25-05-29-42 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2917.554604] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15809c14-5c82-4ccc-8f32-fda7793675ba {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2917.566804] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore1] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee/ts-2025-01-25-05-29-42 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2917.566939] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image f9addc8a-f291-4e7a-a1c5-93144e13b3ee is no longer used by this node. Pending deletion! [ 2917.567118] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2917.567328] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2917.567445] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2917.567743] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2917.567963] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81a1c2fe-852c-429f-9a69-bd2bed606987 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2917.572080] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2917.572080] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bc4ab4-c5b6-a675-6062-c512931aea39" [ 2917.572080] env[62875]: _type = "Task" [ 2917.572080] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2917.579849] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bc4ab4-c5b6-a675-6062-c512931aea39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2918.083154] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bc4ab4-c5b6-a675-6062-c512931aea39, 'name': SearchDatastore_Task, 'duration_secs': 0.008858} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2918.083514] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore1] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096 is no longer used. Deleting! [ 2918.083642] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore1] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2918.083822] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc5e05dd-5d8c-4131-92c4-d4a3163a73f3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2918.090102] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2918.090102] env[62875]: value = "task-2180843" [ 2918.090102] env[62875]: _type = "Task" [ 2918.090102] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2918.097282] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2918.600273] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12736} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2918.600525] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2918.600651] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2918.600872] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2918.600991] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2918.601312] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2918.601565] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8a65b75-6cff-4865-95c8-93b21c96e91b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2918.605612] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2918.605612] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b2692a-525b-b29f-207f-c2993bb45e35" [ 2918.605612] env[62875]: _type = "Task" [ 2918.605612] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2918.612560] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b2692a-525b-b29f-207f-c2993bb45e35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2919.115970] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b2692a-525b-b29f-207f-c2993bb45e35, 'name': SearchDatastore_Task, 'duration_secs': 0.012201} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2919.116365] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2919.116544] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2919.116658] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2919.116975] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2919.117250] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07f33423-1ed8-4885-bb70-8b632ad06cb5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2919.121227] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2919.121227] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524f8caa-db6b-fc96-d657-c5b8dd334d0a" [ 2919.121227] env[62875]: _type = "Task" [ 2919.121227] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2919.128043] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524f8caa-db6b-fc96-d657-c5b8dd334d0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2919.632011] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524f8caa-db6b-fc96-d657-c5b8dd334d0a, 'name': SearchDatastore_Task, 'duration_secs': 0.009587} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2919.632354] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2919.632749] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2919.632749] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2919.632942] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2919.633265] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-206160a8-c8ef-4a10-8f9d-673137c4b25b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2919.637809] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2919.637809] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525c61c7-a827-4827-5e01-6e99d7c32e4e" [ 2919.637809] env[62875]: _type = "Task" [ 2919.637809] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2919.644975] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525c61c7-a827-4827-5e01-6e99d7c32e4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2920.148312] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525c61c7-a827-4827-5e01-6e99d7c32e4e, 'name': SearchDatastore_Task, 'duration_secs': 0.009797} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2920.148661] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/ts-2025-01-25-05-29-44 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2920.148781] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3449acd8-a0e0-4cd7-bad1-86d81d493b78 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.161838] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c/ts-2025-01-25-05-29-44 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2920.162024] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image c3db3f4b-7101-488a-81eb-25039e53d59c is no longer used by this node. Pending deletion! [ 2920.162132] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/c3db3f4b-7101-488a-81eb-25039e53d59c" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2920.162344] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/30a38d5c-4545-4734-8eee-38fdf9c65d45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2920.162459] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/30a38d5c-4545-4734-8eee-38fdf9c65d45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2920.162775] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/30a38d5c-4545-4734-8eee-38fdf9c65d45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2920.163033] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a46581b2-df9e-430d-b8db-0cbb39f407d4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.167437] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2920.167437] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac4503-6dae-3870-81d7-b21684e2b199" [ 2920.167437] env[62875]: _type = "Task" [ 2920.167437] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2920.174482] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac4503-6dae-3870-81d7-b21684e2b199, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2920.678802] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac4503-6dae-3870-81d7-b21684e2b199, 'name': SearchDatastore_Task, 'duration_secs': 0.009307} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2920.679148] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/30a38d5c-4545-4734-8eee-38fdf9c65d45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2920.679370] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/28b25650-682b-49f9-8bc2-595584aa2876" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2920.679491] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/28b25650-682b-49f9-8bc2-595584aa2876" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2920.679826] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/28b25650-682b-49f9-8bc2-595584aa2876" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2920.680099] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ae1a6a8-59b1-4188-b88f-04c37237d2c8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.684319] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2920.684319] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525ea787-b59d-a834-e791-236cbd0d5766" [ 2920.684319] env[62875]: _type = "Task" [ 2920.684319] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2920.691428] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525ea787-b59d-a834-e791-236cbd0d5766, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2921.194912] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525ea787-b59d-a834-e791-236cbd0d5766, 'name': SearchDatastore_Task, 'duration_secs': 0.010049} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2921.195294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/28b25650-682b-49f9-8bc2-595584aa2876" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2921.195480] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2921.195600] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2921.195944] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2921.196235] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c5457ce-de64-4941-883b-43ad0c85a239 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2921.200571] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2921.200571] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524c310b-aa42-1d6d-a224-384bc8c7518f" [ 2921.200571] env[62875]: _type = "Task" [ 2921.200571] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2921.207808] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524c310b-aa42-1d6d-a224-384bc8c7518f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2921.711024] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524c310b-aa42-1d6d-a224-384bc8c7518f, 'name': SearchDatastore_Task, 'duration_secs': 0.010158} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2921.711375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2921.711599] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2921.711721] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2921.712039] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2921.712304] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7dbbceb-21e3-43c8-96bd-e80e504fa9b8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2921.716462] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2921.716462] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e0978b-e7c1-501f-8878-03fc6c9e74ba" [ 2921.716462] env[62875]: _type = "Task" [ 2921.716462] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2921.723470] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e0978b-e7c1-501f-8878-03fc6c9e74ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2922.226663] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e0978b-e7c1-501f-8878-03fc6c9e74ba, 'name': SearchDatastore_Task, 'duration_secs': 0.009461} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2922.227056] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore1] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229/ts-2025-01-25-05-29-47 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2922.227232] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45154d9d-b537-4547-8547-37b1ee0f48b0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2922.239472] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore1] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229/ts-2025-01-25-05-29-47 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2922.239667] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image ca67da56-91ff-4a3f-8660-2df3b9b08229 is no longer used by this node. Pending deletion! [ 2922.239793] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2922.240016] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/f64af33e-b73d-4498-a89f-63fc2bfaf707" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2922.240140] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/f64af33e-b73d-4498-a89f-63fc2bfaf707" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2922.240508] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f64af33e-b73d-4498-a89f-63fc2bfaf707" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2922.240762] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fed81454-9766-4f9b-b0fc-f56fba18b567 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2922.244934] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2922.244934] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5254ec9d-fc70-fac8-7301-c8c176ec88d9" [ 2922.244934] env[62875]: _type = "Task" [ 2922.244934] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2922.251749] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5254ec9d-fc70-fac8-7301-c8c176ec88d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2922.755922] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5254ec9d-fc70-fac8-7301-c8c176ec88d9, 'name': SearchDatastore_Task, 'duration_secs': 0.008577} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2922.756260] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/f64af33e-b73d-4498-a89f-63fc2bfaf707" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2922.756480] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2922.756600] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2922.756911] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2922.757181] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54f845b7-4c24-4898-8ef7-dccdbd6f2608 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2922.761675] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2922.761675] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52af060a-31c9-12cb-0972-9c2161b37036" [ 2922.761675] env[62875]: _type = "Task" [ 2922.761675] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2922.769213] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52af060a-31c9-12cb-0972-9c2161b37036, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2923.272665] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52af060a-31c9-12cb-0972-9c2161b37036, 'name': SearchDatastore_Task, 'duration_secs': 0.01061} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2923.273051] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2923.273235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/25780f2f-e56a-45e7-b5f2-e3b74fa48aca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2923.273363] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/25780f2f-e56a-45e7-b5f2-e3b74fa48aca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2923.273674] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/25780f2f-e56a-45e7-b5f2-e3b74fa48aca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2923.273940] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b15fa7e-1837-456d-a003-6a20f7720bcb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2923.278480] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2923.278480] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f987c5-1f02-fffb-275f-0254472525d2" [ 2923.278480] env[62875]: _type = "Task" [ 2923.278480] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2923.285389] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f987c5-1f02-fffb-275f-0254472525d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2923.789200] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f987c5-1f02-fffb-275f-0254472525d2, 'name': SearchDatastore_Task, 'duration_secs': 0.010885} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2923.789505] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore1] devstack-image-cache_base/25780f2f-e56a-45e7-b5f2-e3b74fa48aca is no longer used. Deleting! 
[ 2923.789681] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore1] devstack-image-cache_base/25780f2f-e56a-45e7-b5f2-e3b74fa48aca {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2923.789940] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3196e1f-b1e5-4b19-a63f-8bbb50f27b26 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2923.796503] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2923.796503] env[62875]: value = "task-2180844" [ 2923.796503] env[62875]: _type = "Task" [ 2923.796503] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2923.803622] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2924.306390] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104796} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2924.306775] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2924.306775] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/25780f2f-e56a-45e7-b5f2-e3b74fa48aca" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2924.306980] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/3e5f1942-f4be-4611-a216-a587c28948bf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2924.307116] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/3e5f1942-f4be-4611-a216-a587c28948bf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2924.307431] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3e5f1942-f4be-4611-a216-a587c28948bf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2924.307685] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e62dba1d-089d-4a47-92ac-d1dbc40f1517 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2924.312694] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2924.312694] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c32ff0-f3be-614c-8e83-212eccc409d6" [ 2924.312694] env[62875]: _type = "Task" [ 2924.312694] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2924.319756] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c32ff0-f3be-614c-8e83-212eccc409d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2924.823416] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c32ff0-f3be-614c-8e83-212eccc409d6, 'name': SearchDatastore_Task, 'duration_secs': 0.008313} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2924.823672] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore1] devstack-image-cache_base/3e5f1942-f4be-4611-a216-a587c28948bf/ts-2025-01-25-05-29-49 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2924.823918] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52133648-6ccb-4ce8-bc2f-b98659e2198c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2924.836612] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore1] devstack-image-cache_base/3e5f1942-f4be-4611-a216-a587c28948bf/ts-2025-01-25-05-29-49 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2924.836765] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image 3e5f1942-f4be-4611-a216-a587c28948bf is no longer used by this node. Pending deletion! 
[ 2924.836941] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/3e5f1942-f4be-4611-a216-a587c28948bf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2924.837173] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2924.837295] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2924.837608] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2924.837842] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d8dec25-a737-42c5-ba6a-fb60515ddc5e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2924.842211] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2924.842211] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5287984a-a3aa-136f-ab4e-879b639fecfa" [ 2924.842211] env[62875]: _type = "Task" [ 2924.842211] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2924.849926] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5287984a-a3aa-136f-ab4e-879b639fecfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2925.353128] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5287984a-a3aa-136f-ab4e-879b639fecfa, 'name': SearchDatastore_Task, 'duration_secs': 0.009654} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2925.353565] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2925.353708] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/e592933c-11ec-425c-88e6-78116a718538" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2925.353836] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/e592933c-11ec-425c-88e6-78116a718538" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2925.354147] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e592933c-11ec-425c-88e6-78116a718538" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2925.354410] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-309abe57-7697-451a-999e-1b82623e83d8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2925.359027] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2925.359027] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5265a46c-14df-63f2-d762-70792d2fb6e9" [ 2925.359027] env[62875]: _type = "Task" [ 2925.359027] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2925.366028] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5265a46c-14df-63f2-d762-70792d2fb6e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2925.869733] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5265a46c-14df-63f2-d762-70792d2fb6e9, 'name': SearchDatastore_Task, 'duration_secs': 0.00953} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2925.870083] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/e592933c-11ec-425c-88e6-78116a718538" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2925.870311] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2925.870431] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2925.870774] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2925.871039] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c312a5d6-820f-439c-a1b0-17bebd691be6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2925.875085] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2925.875085] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525cd172-9ac9-3b10-b22c-69282e0e93ac" [ 2925.875085] env[62875]: _type = "Task" [ 2925.875085] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2925.882047] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525cd172-9ac9-3b10-b22c-69282e0e93ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2926.385931] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525cd172-9ac9-3b10-b22c-69282e0e93ac, 'name': SearchDatastore_Task, 'duration_secs': 0.00925} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2926.386424] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2926.386471] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2926.386595] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2926.386916] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2926.387178] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95fb6965-db84-446d-a20f-fd66624d9cde {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2926.391874] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2926.391874] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5201d425-297a-b141-aef0-2350d952a5f6" [ 2926.391874] env[62875]: _type = "Task" [ 2926.391874] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2926.399252] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5201d425-297a-b141-aef0-2350d952a5f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2926.902386] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5201d425-297a-b141-aef0-2350d952a5f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009088} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2926.902706] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2926.902933] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2926.903063] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2926.903400] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2926.903644] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-605a9357-c88e-48f5-a2d8-9236cea8500e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2926.908246] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2926.908246] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5234a96a-1e3c-74c0-7cce-aa858d0f688d" [ 2926.908246] env[62875]: _type = "Task" [ 2926.908246] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2926.915353] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5234a96a-1e3c-74c0-7cce-aa858d0f688d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2927.421493] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5234a96a-1e3c-74c0-7cce-aa858d0f688d, 'name': SearchDatastore_Task, 'duration_secs': 0.01838} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2927.421915] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2927.421981] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2927.422114] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2927.422377] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2927.422657] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bd73107-6a41-46f5-88e4-5df154281539 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2927.426895] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2927.426895] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b23dd6-2100-8ebb-d677-d83faef7caec" [ 2927.426895] env[62875]: _type = "Task" [ 2927.426895] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2927.434658] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b23dd6-2100-8ebb-d677-d83faef7caec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2927.937195] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b23dd6-2100-8ebb-d677-d83faef7caec, 'name': SearchDatastore_Task, 'duration_secs': 0.008812} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2927.937434] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore1] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2927.937747] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e99cdf5a-c408-4376-a96e-d0e512823fa5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2927.942153] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2927.942153] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a1b85d-f5e3-3eea-6972-d1d0d65f5a9a" [ 2927.942153] env[62875]: _type = "Task" [ 2927.942153] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2927.951639] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a1b85d-f5e3-3eea-6972-d1d0d65f5a9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2928.463125] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a1b85d-f5e3-3eea-6972-d1d0d65f5a9a, 'name': SearchDatastore_Task, 'duration_secs': 0.046475} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2928.463528] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2928.463592] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2928.463946] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2928.464250] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29aff337-213b-4ffe-88e4-9b0981d88417 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2928.468918] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2928.468918] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52aa3924-9848-d596-9beb-252a89dbd5b7" [ 2928.468918] env[62875]: _type = "Task" [ 2928.468918] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2928.476437] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52aa3924-9848-d596-9beb-252a89dbd5b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2928.979850] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52aa3924-9848-d596-9beb-252a89dbd5b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010374} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2928.980218] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/6159033f-c459-4365-b259-a8618572041a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2928.980438] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/f1dbf878-7c30-4ecf-a77b-52f1cf7dccbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2928.980561] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/f1dbf878-7c30-4ecf-a77b-52f1cf7dccbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2928.980912] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f1dbf878-7c30-4ecf-a77b-52f1cf7dccbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2928.981210] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d86ddb5-dd8a-4d51-9b32-3c2386ad8def {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2928.985822] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2928.985822] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521447bd-240a-a742-a3a5-b6d71c1f7ef3" [ 2928.985822] env[62875]: _type = "Task" [ 2928.985822] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2928.993187] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521447bd-240a-a742-a3a5-b6d71c1f7ef3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2929.496750] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521447bd-240a-a742-a3a5-b6d71c1f7ef3, 'name': SearchDatastore_Task, 'duration_secs': 0.010248} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2929.496750] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/f1dbf878-7c30-4ecf-a77b-52f1cf7dccbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2929.497170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/cc71ec5f-a7bf-4453-ace1-c39b4f41b572" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2929.497170] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/cc71ec5f-a7bf-4453-ace1-c39b4f41b572" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2929.497353] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc71ec5f-a7bf-4453-ace1-c39b4f41b572" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2929.497573] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ec19e3-49a8-4cd2-a0f8-0025189befc6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2929.501708] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2929.501708] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522ea41b-b2ba-d287-91fd-bed2f6c79fad" [ 2929.501708] env[62875]: _type = "Task" [ 2929.501708] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2929.508865] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522ea41b-b2ba-d287-91fd-bed2f6c79fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2930.012430] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522ea41b-b2ba-d287-91fd-bed2f6c79fad, 'name': SearchDatastore_Task, 'duration_secs': 0.009716} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2930.012760] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/cc71ec5f-a7bf-4453-ace1-c39b4f41b572" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2930.012990] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/77bf4d8d-5a6c-48b4-ada0-9b685bee2ba3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2930.013126] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/77bf4d8d-5a6c-48b4-ada0-9b685bee2ba3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2930.013448] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/77bf4d8d-5a6c-48b4-ada0-9b685bee2ba3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2930.013712] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b101741d-d5b3-48ae-82fc-e0561776cd1f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2930.017908] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2930.017908] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528df705-2aa7-9faa-220b-09b410949a65" [ 2930.017908] env[62875]: _type = "Task" [ 2930.017908] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2930.025105] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528df705-2aa7-9faa-220b-09b410949a65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2930.529504] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528df705-2aa7-9faa-220b-09b410949a65, 'name': SearchDatastore_Task, 'duration_secs': 0.010547} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2930.529935] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/77bf4d8d-5a6c-48b4-ada0-9b685bee2ba3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2930.530081] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2930.530202] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2930.530499] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2930.530758] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3cb9979-fecc-427b-a192-504163e900be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2930.535158] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2930.535158] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4d03-a8a5-93c9-abf9-206286b0a9c4" [ 2930.535158] env[62875]: _type = "Task" [ 2930.535158] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2930.541960] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4d03-a8a5-93c9-abf9-206286b0a9c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2931.046062] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52dc4d03-a8a5-93c9-abf9-206286b0a9c4, 'name': SearchDatastore_Task, 'duration_secs': 0.010695} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2931.046369] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d is no longer used. Deleting! 
[ 2931.046508] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2931.046763] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80aa7f29-a8fb-40af-8731-0ae448b39d3c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2931.052349] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2931.052349] env[62875]: value = "task-2180845" [ 2931.052349] env[62875]: _type = "Task" [ 2931.052349] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2931.059575] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2931.562215] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099805} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2931.562645] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2931.562645] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/1c5c1547-ef97-4956-9548-0aa47fc3615d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2931.562784] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2931.562901] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2931.563235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2931.563483] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-706c9d06-ec7d-44ec-8835-ebf5019907ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2931.567497] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2931.567497] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e6b9f4-ccea-0f5d-d742-90b372cf8d04" [ 2931.567497] env[62875]: _type = "Task" [ 2931.567497] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2931.574411] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e6b9f4-ccea-0f5d-d742-90b372cf8d04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2932.077874] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e6b9f4-ccea-0f5d-d742-90b372cf8d04, 'name': SearchDatastore_Task, 'duration_secs': 0.010376} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2932.078224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/5040a39a-633b-42a5-a240-f4257866c4a0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2932.078451] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/76bd5bc2-baa0-446f-8587-40c0b13c7c83" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2932.078571] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/76bd5bc2-baa0-446f-8587-40c0b13c7c83" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2932.078954] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/76bd5bc2-baa0-446f-8587-40c0b13c7c83" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2932.079233] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7384e220-8283-4068-a4d9-63ad9309e3af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2932.083748] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2932.083748] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5247c39e-4a6f-c31f-ff4e-9fd3e2ac6271" [ 2932.083748] env[62875]: _type = "Task" [ 2932.083748] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2932.090698] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5247c39e-4a6f-c31f-ff4e-9fd3e2ac6271, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2932.595159] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5247c39e-4a6f-c31f-ff4e-9fd3e2ac6271, 'name': SearchDatastore_Task, 'duration_secs': 0.009916} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2932.595578] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/76bd5bc2-baa0-446f-8587-40c0b13c7c83" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2932.595746] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/056702aa-ae9e-4448-a5b2-f816326b92a6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2932.595871] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/056702aa-ae9e-4448-a5b2-f816326b92a6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2932.596224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/056702aa-ae9e-4448-a5b2-f816326b92a6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2932.596500] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05c06fd6-ff39-4f6c-9548-c1c7862483e5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2932.600895] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2932.600895] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d563a9-9752-a56c-b7ba-1c81d9a4db61" [ 2932.600895] env[62875]: _type = "Task" [ 2932.600895] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2932.608126] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d563a9-9752-a56c-b7ba-1c81d9a4db61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2933.111764] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d563a9-9752-a56c-b7ba-1c81d9a4db61, 'name': SearchDatastore_Task, 'duration_secs': 0.009632} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2933.112115] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/056702aa-ae9e-4448-a5b2-f816326b92a6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2933.112344] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2933.112462] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2933.112794] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2933.113070] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a474ef9-6433-49e7-8a62-35730b2c55c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2933.117262] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2933.117262] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d1c8c9-f922-502d-848a-eb5e63f8675a" [ 2933.117262] env[62875]: _type = "Task" [ 2933.117262] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2933.124561] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d1c8c9-f922-502d-848a-eb5e63f8675a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2933.627517] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d1c8c9-f922-502d-848a-eb5e63f8675a, 'name': SearchDatastore_Task, 'duration_secs': 0.009827} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2933.627857] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/a1f49a40-fc6e-4323-a6fa-28464cfb8894" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2933.628095] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/8c6e7740-5b2a-4687-afba-b1b680f71a88" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2933.628216] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/8c6e7740-5b2a-4687-afba-b1b680f71a88" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2933.628550] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c6e7740-5b2a-4687-afba-b1b680f71a88" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2933.628811] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd2afc06-fc17-492a-b562-a4260e06502a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2933.632975] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2933.632975] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c879c-54a0-7708-bd0d-a9bfa4de66bc" [ 2933.632975] env[62875]: _type = "Task" [ 2933.632975] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2933.640204] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c879c-54a0-7708-bd0d-a9bfa4de66bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2934.144520] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c879c-54a0-7708-bd0d-a9bfa4de66bc, 'name': SearchDatastore_Task, 'duration_secs': 0.009936} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2934.144847] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/8c6e7740-5b2a-4687-afba-b1b680f71a88" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2934.145078] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/ac486cad-34b2-4df5-8b2c-5817a3136170" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2934.145196] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/ac486cad-34b2-4df5-8b2c-5817a3136170" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2934.145502] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ac486cad-34b2-4df5-8b2c-5817a3136170" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2934.145758] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43cd60b2-c4f5-4826-8f67-5e89e39d97c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2934.150377] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2934.150377] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cecc3e-4dda-cc32-d150-00a4ac0c5c7a" [ 2934.150377] env[62875]: _type = "Task" [ 2934.150377] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2934.157524] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cecc3e-4dda-cc32-d150-00a4ac0c5c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2934.660705] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cecc3e-4dda-cc32-d150-00a4ac0c5c7a, 'name': SearchDatastore_Task, 'duration_secs': 0.008812} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2934.661020] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore2] devstack-image-cache_base/ac486cad-34b2-4df5-8b2c-5817a3136170/ts-2025-01-25-05-29-59 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2934.661247] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6df88f3-2fe6-4bcc-9425-fc909056aa04 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2934.673025] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore2] devstack-image-cache_base/ac486cad-34b2-4df5-8b2c-5817a3136170/ts-2025-01-25-05-29-59 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2934.673164] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image ac486cad-34b2-4df5-8b2c-5817a3136170 is no longer used by this node. Pending deletion! [ 2934.673327] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/ac486cad-34b2-4df5-8b2c-5817a3136170" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2934.673538] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/e0978245-b532-4695-8f53-f303d4e0fedc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2934.673651] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/e0978245-b532-4695-8f53-f303d4e0fedc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2934.673971] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e0978245-b532-4695-8f53-f303d4e0fedc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2934.674204] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-447439b9-7587-40b3-9418-b40c6bf495d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2934.678011] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2934.678011] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f469ec-6643-d33d-48f2-8362a4e712c0" [ 2934.678011] env[62875]: _type = "Task" [ 2934.678011] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2934.684864] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f469ec-6643-d33d-48f2-8362a4e712c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2935.188989] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f469ec-6643-d33d-48f2-8362a4e712c0, 'name': SearchDatastore_Task, 'duration_secs': 0.007988} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2935.189404] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/e0978245-b532-4695-8f53-f303d4e0fedc" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2935.189701] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/9e4cb130-aa86-4d93-bc3a-de51b9c80e56" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2935.189857] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/9e4cb130-aa86-4d93-bc3a-de51b9c80e56" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2935.190213] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/9e4cb130-aa86-4d93-bc3a-de51b9c80e56" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2935.190509] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16105522-d19f-4028-af41-5294d48776b6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2935.194789] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2935.194789] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e026a0-0216-2416-5e9f-583a3899b88b" [ 2935.194789] env[62875]: _type = "Task" [ 2935.194789] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2935.202038] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e026a0-0216-2416-5e9f-583a3899b88b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2935.705436] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e026a0-0216-2416-5e9f-583a3899b88b, 'name': SearchDatastore_Task, 'duration_secs': 0.010058} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2935.705784] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore2] devstack-image-cache_base/9e4cb130-aa86-4d93-bc3a-de51b9c80e56/ts-2025-01-25-05-30-00 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2935.705968] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc4758ec-70f2-413d-9d70-f264adf62222 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2935.719632] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore2] devstack-image-cache_base/9e4cb130-aa86-4d93-bc3a-de51b9c80e56/ts-2025-01-25-05-30-00 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2935.719796] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image 9e4cb130-aa86-4d93-bc3a-de51b9c80e56 is no longer used by this node. Pending deletion! [ 2935.719948] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/9e4cb130-aa86-4d93-bc3a-de51b9c80e56" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2935.720177] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2935.720294] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2935.720604] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2935.720820] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fad5f653-07ba-4120-ae94-4c34b6c4ba60 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2935.724771] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2935.724771] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52175746-a481-9b23-d1a7-5a441cc95b25" [ 2935.724771] env[62875]: _type = "Task" [ 2935.724771] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2935.731591] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52175746-a481-9b23-d1a7-5a441cc95b25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2935.803668] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2935.804140] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2935.804471] env[62875]: INFO nova.compute.manager [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Shelving [ 2936.235572] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52175746-a481-9b23-d1a7-5a441cc95b25, 'name': SearchDatastore_Task, 'duration_secs': 0.012465} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2936.235879] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096 is no longer used. Deleting! [ 2936.236035] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2936.236288] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e9b0f5d-1a91-41a2-b9e8-dd7cd24abfe6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2936.241929] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2936.241929] env[62875]: value = "task-2180846" [ 2936.241929] env[62875]: _type = "Task" [ 2936.241929] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2936.250175] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2936.751354] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1288} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2936.751678] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2936.751754] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/43645b02-e432-40bc-b5e9-013bd8dc0096" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2936.751913] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/e590fabd-223b-4b09-b527-39532684822d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2936.752043] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/e590fabd-223b-4b09-b527-39532684822d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2936.752361] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e590fabd-223b-4b09-b527-39532684822d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2936.752620] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7952cef1-82c5-4cb9-ba89-3a1699540a68 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2936.757333] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2936.757333] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac61cc-7b41-0bc4-4bc8-a04bd53177a9" [ 2936.757333] env[62875]: _type = "Task" [ 2936.757333] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2936.764854] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac61cc-7b41-0bc4-4bc8-a04bd53177a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2936.813591] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2936.813866] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44aa58c3-57c8-4bc9-adb4-308bfa92682e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2936.820869] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2936.820869] env[62875]: value = "task-2180847" [ 2936.820869] env[62875]: _type = "Task" [ 2936.820869] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2936.828399] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180847, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2937.268303] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ac61cc-7b41-0bc4-4bc8-a04bd53177a9, 'name': SearchDatastore_Task, 'duration_secs': 0.012496} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2937.268645] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/e590fabd-223b-4b09-b527-39532684822d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2937.268873] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/b79a506c-a973-45f6-93da-a6293ccf357d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2937.268994] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/b79a506c-a973-45f6-93da-a6293ccf357d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2937.269339] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b79a506c-a973-45f6-93da-a6293ccf357d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2937.269609] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a4bb72c-220d-4ff9-bc6a-dba05cd083a1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.273929] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2937.273929] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5208a722-f1c8-c715-3c3e-6642a7b56e2f" [ 2937.273929] env[62875]: _type = "Task" [ 2937.273929] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2937.280940] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5208a722-f1c8-c715-3c3e-6642a7b56e2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2937.328608] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180847, 'name': PowerOffVM_Task, 'duration_secs': 0.19368} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2937.328846] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2937.329585] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2016bf47-05ff-437a-95b3-868df8b31c05 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.346960] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8baf3601-124c-4845-a2fb-f606ee449fef {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.784716] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5208a722-f1c8-c715-3c3e-6642a7b56e2f, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2937.785081] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/b79a506c-a973-45f6-93da-a6293ccf357d is no longer used. Deleting! [ 2937.785188] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/b79a506c-a973-45f6-93da-a6293ccf357d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2937.785466] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7056d73a-0db9-4441-be69-7134cbfc263a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.791526] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2937.791526] env[62875]: value = "task-2180848" [ 2937.791526] env[62875]: _type = "Task" [ 2937.791526] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2937.798839] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2937.857250] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Creating Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2937.857526] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b9fc9be8-9e27-441b-b7c1-e91b9529382e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.864628] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2937.864628] env[62875]: value = "task-2180849" [ 2937.864628] env[62875]: _type = "Task" [ 2937.864628] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2937.872439] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180849, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2938.301478] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108267} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2938.301720] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2938.301862] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/b79a506c-a973-45f6-93da-a6293ccf357d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2938.302104] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/c9ce1adc-670a-40d0-853a-a46b7c797c1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2938.302224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/c9ce1adc-670a-40d0-853a-a46b7c797c1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2938.302553] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9ce1adc-670a-40d0-853a-a46b7c797c1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2938.302815] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-934eeb2a-ad46-4eaa-83f0-7db71c781d48 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2938.307049] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2938.307049] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e4b8ff-a64c-44c1-fc93-a011e8824f8f" [ 2938.307049] env[62875]: _type = "Task" [ 2938.307049] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2938.315241] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e4b8ff-a64c-44c1-fc93-a011e8824f8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2938.374546] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180849, 'name': CreateSnapshot_Task, 'duration_secs': 0.394913} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2938.374839] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Created Snapshot of the VM instance {{(pid=62875) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2938.375567] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8e6792-c611-45fd-82cf-42c7c278e469 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2938.817796] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e4b8ff-a64c-44c1-fc93-a011e8824f8f, 'name': SearchDatastore_Task, 'duration_secs': 0.009719} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2938.818163] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/c9ce1adc-670a-40d0-853a-a46b7c797c1d is no longer used. Deleting! [ 2938.818294] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/c9ce1adc-670a-40d0-853a-a46b7c797c1d {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2938.818547] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5990604a-a50c-4102-964d-6904eea00ec6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2938.824179] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2938.824179] env[62875]: value = "task-2180850" [ 2938.824179] env[62875]: _type = "Task" [ 2938.824179] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2938.831242] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180850, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2938.892451] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Creating linked-clone VM from snapshot {{(pid=62875) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2938.892672] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1454d56c-7502-40ce-9353-3e8728d1f784 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2938.901688] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2938.901688] env[62875]: value = "task-2180851" [ 2938.901688] env[62875]: _type = "Task" [ 2938.901688] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2938.909234] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180851, 'name': CloneVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2939.333803] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103325} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2939.334084] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2939.334234] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/c9ce1adc-670a-40d0-853a-a46b7c797c1d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2939.334465] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2939.334584] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2939.334907] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2939.335190] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef7f43bd-5882-40e1-8660-648bf183542a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2939.339488] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2939.339488] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bd9d2f-4d95-0d49-5aea-badf73712d36" [ 2939.339488] env[62875]: _type = "Task" [ 2939.339488] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2939.347752] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bd9d2f-4d95-0d49-5aea-badf73712d36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2939.411268] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180851, 'name': CloneVM_Task} progress is 94%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2939.850839] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bd9d2f-4d95-0d49-5aea-badf73712d36, 'name': SearchDatastore_Task, 'duration_secs': 0.00908} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2939.851187] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/b78263dd-e0be-466b-8bd4-57102a1e65e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2939.851290] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/b017f94f-a28c-4981-8e18-563e4f102192" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2939.851411] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/b017f94f-a28c-4981-8e18-563e4f102192" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2939.851743] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b017f94f-a28c-4981-8e18-563e4f102192" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2939.852032] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b8d4d7e-dd4c-43b4-ae8f-39a73891b5ce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2939.856969] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2939.856969] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e1e367-8fc1-9685-54ed-557085502c21" [ 2939.856969] env[62875]: _type = "Task" [ 2939.856969] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2939.864565] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e1e367-8fc1-9685-54ed-557085502c21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2939.910828] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180851, 'name': CloneVM_Task} progress is 95%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2940.368382] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e1e367-8fc1-9685-54ed-557085502c21, 'name': SearchDatastore_Task, 'duration_secs': 0.010061} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2940.368706] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/b017f94f-a28c-4981-8e18-563e4f102192" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2940.368957] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/a8dbd113-f308-422a-aa48-d016b8abd3a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2940.369082] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/a8dbd113-f308-422a-aa48-d016b8abd3a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2940.369394] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a8dbd113-f308-422a-aa48-d016b8abd3a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2940.369650] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d10f5f0-2704-488d-a970-a1022abc9bcc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2940.373915] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2940.373915] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5279270d-df57-0b96-4faf-09327f38da07" [ 2940.373915] env[62875]: _type = "Task" [ 2940.373915] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2940.381442] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5279270d-df57-0b96-4faf-09327f38da07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2940.411072] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180851, 'name': CloneVM_Task, 'duration_secs': 1.122182} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2940.411297] env[62875]: INFO nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Created linked-clone VM from snapshot [ 2940.411965] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ca7aeb-ec36-42a9-a7a9-1f0121885f42 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2940.419536] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Uploading image 74a2b5f5-272a-42b9-93bb-7e3de925645f {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2940.438918] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2940.438918] env[62875]: value = "vm-445041" [ 2940.438918] env[62875]: _type = "VirtualMachine" [ 2940.438918] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2940.439140] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ce6fd280-c21f-440d-9469-fc1cfa23757c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2940.446560] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease: (returnval){ [ 2940.446560] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52637e4c-1553-869b-c009-48f51ecaed9d" [ 2940.446560] env[62875]: _type = "HttpNfcLease" [ 2940.446560] env[62875]: } obtained for exporting VM: (result){ [ 2940.446560] env[62875]: value = "vm-445041" [ 2940.446560] env[62875]: _type = "VirtualMachine" [ 2940.446560] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2940.446560] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the lease: (returnval){ [ 2940.446560] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52637e4c-1553-869b-c009-48f51ecaed9d" [ 2940.446560] env[62875]: _type = "HttpNfcLease" [ 2940.446560] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2940.451978] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2940.451978] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52637e4c-1553-869b-c009-48f51ecaed9d" [ 2940.451978] env[62875]: _type = "HttpNfcLease" [ 2940.451978] env[62875]: } is initializing. 
{{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2940.884739] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5279270d-df57-0b96-4faf-09327f38da07, 'name': SearchDatastore_Task, 'duration_secs': 0.010396} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2940.885083] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/a8dbd113-f308-422a-aa48-d016b8abd3a1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2940.885312] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/1c1c4e21-94f5-4d1d-b330-21c177d6782e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2940.885432] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/1c1c4e21-94f5-4d1d-b330-21c177d6782e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2940.885740] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1c1c4e21-94f5-4d1d-b330-21c177d6782e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2940.885997] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8759e664-8449-4079-af13-64cb79883964 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2940.890151] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2940.890151] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e3aad0-7336-ca8d-70bf-69564fe50984" [ 2940.890151] env[62875]: _type = "Task" [ 2940.890151] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2940.897456] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e3aad0-7336-ca8d-70bf-69564fe50984, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2940.954807] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2940.954807] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52637e4c-1553-869b-c009-48f51ecaed9d" [ 2940.954807] env[62875]: _type = "HttpNfcLease" [ 2940.954807] env[62875]: } is ready. 
{{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2940.955094] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2940.955094] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52637e4c-1553-869b-c009-48f51ecaed9d" [ 2940.955094] env[62875]: _type = "HttpNfcLease" [ 2940.955094] env[62875]: }. {{(pid=62875) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2940.955805] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292275b9-b6a0-429b-950a-8a1201dc1323 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2940.963058] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a80f96-1ddf-0716-49d8-177ea12b1b38/disk-0.vmdk from lease info. {{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2940.963244] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a80f96-1ddf-0716-49d8-177ea12b1b38/disk-0.vmdk for reading. {{(pid=62875) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2941.047616] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a8d12fab-ce8f-47da-90a5-71a33f1f213f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2941.401476] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e3aad0-7336-ca8d-70bf-69564fe50984, 'name': SearchDatastore_Task, 'duration_secs': 0.009717} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2941.402082] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/1c1c4e21-94f5-4d1d-b330-21c177d6782e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2941.402407] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/4d68725f-508b-4e27-a81d-cac5a75397a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2941.402570] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/4d68725f-508b-4e27-a81d-cac5a75397a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2941.403061] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d68725f-508b-4e27-a81d-cac5a75397a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2941.403345] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f896e51b-df7e-4e8c-9b05-f6d2f0bf4b03 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2941.408147] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2941.408147] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5257def3-f9e7-d76a-f0e9-51a259fe7b8b" [ 2941.408147] env[62875]: _type = "Task" [ 2941.408147] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2941.417238] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5257def3-f9e7-d76a-f0e9-51a259fe7b8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2941.919344] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5257def3-f9e7-d76a-f0e9-51a259fe7b8b, 'name': SearchDatastore_Task, 'duration_secs': 0.010699} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2941.919950] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/4d68725f-508b-4e27-a81d-cac5a75397a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2941.920270] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2941.920482] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2941.920957] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2941.921268] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-543ff7ec-32a3-496f-9bed-13128f49e3ad {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2941.925813] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2941.925813] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ceb70a-7ff3-25c4-718f-d8502d8ea19e" [ 2941.925813] env[62875]: _type = "Task" [ 2941.925813] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2941.933951] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ceb70a-7ff3-25c4-718f-d8502d8ea19e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2942.436852] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ceb70a-7ff3-25c4-718f-d8502d8ea19e, 'name': SearchDatastore_Task, 'duration_secs': 0.019094} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2942.437307] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/1f4ec190-4744-4525-a739-b7944786abcb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2942.437606] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/0cfbb568-c701-41ec-b986-a9bba64f8630" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2942.437827] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/0cfbb568-c701-41ec-b986-a9bba64f8630" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2942.438218] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0cfbb568-c701-41ec-b986-a9bba64f8630" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2942.438541] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c63aaa8-b89e-4c78-a498-8f350efb3999 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2942.442956] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2942.442956] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52580634-bf18-df27-c6dc-181badfffd65" [ 2942.442956] env[62875]: _type = "Task" [ 2942.442956] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2942.450295] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52580634-bf18-df27-c6dc-181badfffd65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2942.953662] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52580634-bf18-df27-c6dc-181badfffd65, 'name': SearchDatastore_Task, 'duration_secs': 0.015797} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2942.954083] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/0cfbb568-c701-41ec-b986-a9bba64f8630 is no longer used. Deleting! 
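The run of entries above is Nova's image-cache manager walking [datastore2] devstack-image-cache_base one cached image at a time; every iteration is bracketed by the same oslo.concurrency lock dance (lockutils.py:310/313/321/331 in the trace). A minimal sketch of that bracket, assuming the stock lockutils API and a hypothetical helper name — the real logic lives in nova/virt/vmwareapi/imagecache.py:

    from oslo_concurrency import lockutils

    def _check_cache_entry(session, ds_browser, cache_path):
        # cache_path: e.g. "[datastore2] devstack-image-cache_base/<image-id>".
        # external=True is what makes the extra "Acquired external semaphore"
        # line appear between "Acquired lock" and "Releasing lock" above.
        with lockutils.lock(cache_path, lock_file_prefix='nova-', external=True):
            # While the lock is held, a HostDatastoreBrowser.SearchDatastore_Task
            # is issued to decide whether the cached image is still referenced.
            pass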
[ 2942.954139] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/0cfbb568-c701-41ec-b986-a9bba64f8630 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2942.954409] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-228d18f4-2b41-4d72-8c9f-1a53dadd813c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2942.961070] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2942.961070] env[62875]: value = "task-2180853" [ 2942.961070] env[62875]: _type = "Task" [ 2942.961070] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2942.968922] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180853, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2943.471279] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218142} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2943.471490] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2943.471664] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/0cfbb568-c701-41ec-b986-a9bba64f8630" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2943.471886] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/5828d2c5-98c9-4678-a0da-8dc780fed4f6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2943.472016] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/5828d2c5-98c9-4678-a0da-8dc780fed4f6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2943.472380] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5828d2c5-98c9-4678-a0da-8dc780fed4f6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2943.472640] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7afe2db0-455e-496e-a58f-fecce0cffd33 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2943.476809] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2943.476809] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5210ff6b-8d59-d114-36d2-a0623f4581dc" [ 2943.476809] env[62875]: _type = "Task" [ 2943.476809] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2943.484380] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5210ff6b-8d59-d114-36d2-a0623f4581dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2943.988753] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5210ff6b-8d59-d114-36d2-a0623f4581dc, 'name': SearchDatastore_Task, 'duration_secs': 0.012573} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2943.989137] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/5828d2c5-98c9-4678-a0da-8dc780fed4f6 is no longer used. Deleting! [ 2943.989235] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/5828d2c5-98c9-4678-a0da-8dc780fed4f6 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2943.989482] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ec13d2e-f002-44d6-a10f-a9f24f93ae6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2943.996224] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2943.996224] env[62875]: value = "task-2180854" [ 2943.996224] env[62875]: _type = "Task" [ 2943.996224] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2944.004148] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180854, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2944.507093] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185171} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2944.507346] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2944.507504] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/5828d2c5-98c9-4678-a0da-8dc780fed4f6" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2944.507740] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/a019f6ce-eb2f-40ea-ab8e-2a4b3e85f40a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2944.507854] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/a019f6ce-eb2f-40ea-ab8e-2a4b3e85f40a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2944.508237] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a019f6ce-eb2f-40ea-ab8e-2a4b3e85f40a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2944.508531] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88c5a1f8-fa01-41a3-a078-52d3fba3c9dd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2944.513032] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2944.513032] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525da0d5-ad32-4e4e-73fa-0de67a2881e3" [ 2944.513032] env[62875]: _type = "Task" [ 2944.513032] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2944.520451] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525da0d5-ad32-4e4e-73fa-0de67a2881e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2945.023848] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525da0d5-ad32-4e4e-73fa-0de67a2881e3, 'name': SearchDatastore_Task, 'duration_secs': 0.014519} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2945.024254] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/a019f6ce-eb2f-40ea-ab8e-2a4b3e85f40a" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2945.024301] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/b0ae88c3-363b-47a2-bd13-599d16e6fcb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2945.024455] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/b0ae88c3-363b-47a2-bd13-599d16e6fcb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2945.024752] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b0ae88c3-363b-47a2-bd13-599d16e6fcb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2945.025049] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c019cc3a-f1f2-4916-8fee-8de7dee24d79 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.029485] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2945.029485] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c3e3b7-02f4-58eb-6aca-9b247188a2b4" [ 2945.029485] env[62875]: _type = "Task" [ 2945.029485] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2945.036843] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c3e3b7-02f4-58eb-6aca-9b247188a2b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2945.539957] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c3e3b7-02f4-58eb-6aca-9b247188a2b4, 'name': SearchDatastore_Task, 'duration_secs': 0.015755} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2945.540325] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/b0ae88c3-363b-47a2-bd13-599d16e6fcb8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2945.540562] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/00bcc075-3cdc-4736-9b9e-5adac3f4b9c5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2945.540681] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/00bcc075-3cdc-4736-9b9e-5adac3f4b9c5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2945.541083] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/00bcc075-3cdc-4736-9b9e-5adac3f4b9c5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2945.541369] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ba4cc68-bcf8-45e5-96ba-9d16ee4d812e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.546218] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2945.546218] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529e100d-56b4-86f0-0c7a-49cf7b4f06b8" [ 2945.546218] env[62875]: _type = "Task" [ 2945.546218] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2945.553760] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529e100d-56b4-86f0-0c7a-49cf7b4f06b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2946.057238] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529e100d-56b4-86f0-0c7a-49cf7b4f06b8, 'name': SearchDatastore_Task, 'duration_secs': 0.015941} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2946.057803] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/00bcc075-3cdc-4736-9b9e-5adac3f4b9c5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2946.057803] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/d7bf13c2-4dbb-4c73-b610-4fd93344d143" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2946.058050] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/d7bf13c2-4dbb-4c73-b610-4fd93344d143" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2946.058279] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d7bf13c2-4dbb-4c73-b610-4fd93344d143" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2946.058567] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17246586-f071-4f76-93a4-fbe177ed438d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.063286] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2946.063286] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c8c194-ab9a-2d13-7627-ad03c0a26e72" [ 2946.063286] env[62875]: _type = "Task" [ 2946.063286] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2946.070946] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c8c194-ab9a-2d13-7627-ad03c0a26e72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2946.573762] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c8c194-ab9a-2d13-7627-ad03c0a26e72, 'name': SearchDatastore_Task, 'duration_secs': 0.018767} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2946.574141] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/d7bf13c2-4dbb-4c73-b610-4fd93344d143" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2946.574379] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/e53430fd-f0c0-42df-b27d-6d957b02e551" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2946.574500] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/e53430fd-f0c0-42df-b27d-6d957b02e551" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2946.574834] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e53430fd-f0c0-42df-b27d-6d957b02e551" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2946.575126] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42ab3c19-e48e-4049-ba34-b20ec18f5974 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.579480] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2946.579480] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523470fd-f53b-a348-ccc9-c914ab3406fb" [ 2946.579480] env[62875]: _type = "Task" [ 2946.579480] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2946.586967] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523470fd-f53b-a348-ccc9-c914ab3406fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2947.090346] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]523470fd-f53b-a348-ccc9-c914ab3406fb, 'name': SearchDatastore_Task, 'duration_secs': 0.017033} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2947.090817] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/e53430fd-f0c0-42df-b27d-6d957b02e551" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2947.090900] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/e97bd828-a5f5-4603-ba1d-4b1e64b28d0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2947.091035] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/e97bd828-a5f5-4603-ba1d-4b1e64b28d0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2947.091383] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e97bd828-a5f5-4603-ba1d-4b1e64b28d0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2947.091650] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6b5494b-311b-4d35-b4d7-89c1ba949c1f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2947.096084] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2947.096084] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d8c4c9-0fc9-8813-807a-af61b9c9734a" [ 2947.096084] env[62875]: _type = "Task" [ 2947.096084] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2947.103398] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d8c4c9-0fc9-8813-807a-af61b9c9734a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2947.607365] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d8c4c9-0fc9-8813-807a-af61b9c9734a, 'name': SearchDatastore_Task, 'duration_secs': 0.01668} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2947.607741] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/e97bd828-a5f5-4603-ba1d-4b1e64b28d0b is no longer used. Deleting! 
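When an image is judged unused, the "Deleting!" line is followed by FileManager.DeleteDatastoreFile_Task and the usual wait_for_task polling (api.py:397/434/444). A minimal sketch of that call chain, assuming plain oslo.vmware calls rather than Nova's internal wrappers:

    def _delete_cache_file(session, ds_path, dc_ref):
        # session: oslo_vmware.api.VMwareAPISession, as created at startup.
        # Spawns DeleteDatastoreFile_Task and blocks until the poller reports
        # "completed successfully" -- exactly the pattern in the lines above.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)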
[ 2947.607912] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/e97bd828-a5f5-4603-ba1d-4b1e64b28d0b {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2947.608227] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d0ed98b-2e9e-46ba-beac-fd092584c933 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2947.615364] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2947.615364] env[62875]: value = "task-2180855" [ 2947.615364] env[62875]: _type = "Task" [ 2947.615364] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2947.623139] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2948.125737] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230188} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2948.126121] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2948.126248] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/e97bd828-a5f5-4603-ba1d-4b1e64b28d0b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2948.126471] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2948.126588] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2948.126924] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2948.127193] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38ad0d78-f02d-4ef0-b8b0-1eb7e6230203 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.131444] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2948.131444] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cb6c04-3ff7-db5e-21fe-793468943690" [ 2948.131444] env[62875]: _type = "Task" [ 2948.131444] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2948.138612] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cb6c04-3ff7-db5e-21fe-793468943690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2948.432239] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a80f96-1ddf-0716-49d8-177ea12b1b38/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2948.433127] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af335442-26dd-4566-a705-6fae8e27ab1e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.439066] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a80f96-1ddf-0716-49d8-177ea12b1b38/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2948.439212] env[62875]: ERROR oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a80f96-1ddf-0716-49d8-177ea12b1b38/disk-0.vmdk due to incomplete transfer. [ 2948.439413] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-867f398d-0433-47e1-bdbd-10927ac282d2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.445430] env[62875]: DEBUG oslo_vmware.rw_handles [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a80f96-1ddf-0716-49d8-177ea12b1b38/disk-0.vmdk. 
{{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2948.445613] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Uploaded image 74a2b5f5-272a-42b9-93bb-7e3de925645f to the Glance image server {{(pid=62875) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2948.447612] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Destroying the VM {{(pid=62875) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2948.447826] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bf0d34b3-6aa6-4ae6-a592-9d98d67acec0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.453269] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2948.453269] env[62875]: value = "task-2180856" [ 2948.453269] env[62875]: _type = "Task" [ 2948.453269] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2948.460251] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180856, 'name': Destroy_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2948.641680] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52cb6c04-3ff7-db5e-21fe-793468943690, 'name': SearchDatastore_Task, 'duration_secs': 0.044446} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2948.642017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/5c3d7e9e-3203-4f1e-a6b5-9617ec5d2489" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2948.642258] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/d008019d-b38c-46c2-84c5-4290d4b4510e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2948.642378] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/d008019d-b38c-46c2-84c5-4290d4b4510e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2948.642701] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d008019d-b38c-46c2-84c5-4290d4b4510e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2948.642972] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-343336c6-00ae-473a-a286-113286169508 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.647184] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2948.647184] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b7affb-5f38-6a7c-76c6-554e3e3611a0" [ 2948.647184] env[62875]: _type = "Task" [ 2948.647184] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2948.654254] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b7affb-5f38-6a7c-76c6-554e3e3611a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2948.962979] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180856, 'name': Destroy_Task} progress is 33%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2949.158017] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b7affb-5f38-6a7c-76c6-554e3e3611a0, 'name': SearchDatastore_Task, 'duration_secs': 0.021384} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2949.158385] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/d008019d-b38c-46c2-84c5-4290d4b4510e is no longer used. Deleting! 
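The snapshot upload above rides an HttpNfcLease: ExportVm returns a lease, the poller waits for it to become ready, the disk-0.vmdk URL is read out of the lease info, and on an incomplete transfer the lease is aborted rather than completed. A minimal sketch of the read side, assuming stock oslo.vmware calls (the real logic is in oslo_vmware/rw_handles.py, per the trace):

    from oslo_vmware import vim_util

    def _export_vmdk_url(session, vm_ref):
        # session: oslo_vmware.api.VMwareAPISession. Mirrors the
        # _create_export_vm_lease / _find_vmdk_url steps logged above.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')
        # Pick the deviceUrl entry that points at the exported .vmdk
        # (assumed selection rule; the library matches on the disk URL).
        return next(u.url for u in info.deviceUrl if u.url.endswith('.vmdk'))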
[ 2949.158477] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/d008019d-b38c-46c2-84c5-4290d4b4510e {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2949.158832] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-344c6743-609e-4d58-8ac3-02ddbe292e08 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2949.165389] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2949.165389] env[62875]: value = "task-2180857" [ 2949.165389] env[62875]: _type = "Task" [ 2949.165389] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2949.172723] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2949.464070] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180856, 'name': Destroy_Task, 'duration_secs': 0.621296} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2949.464070] env[62875]: INFO nova.virt.vmwareapi.vm_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Destroyed the VM [ 2949.464426] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Deleting Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2949.464489] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4079f83d-c924-4706-aa84-b2e5df0afa6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2949.471098] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2949.471098] env[62875]: value = "task-2180858" [ 2949.471098] env[62875]: _type = "Task" [ 2949.471098] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2949.478527] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180858, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2949.675030] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171674} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2949.675292] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2949.675438] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/d008019d-b38c-46c2-84c5-4290d4b4510e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2949.675670] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/2a14ed21-f1d3-4076-ba94-5570d5421316" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2949.675780] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/2a14ed21-f1d3-4076-ba94-5570d5421316" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2949.676145] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2a14ed21-f1d3-4076-ba94-5570d5421316" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2949.676411] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eeeaed75-e921-4a4e-96f0-ea627d124865 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2949.680547] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2949.680547] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526697e0-7fcf-e5e8-1cd8-0123721617e7" [ 2949.680547] env[62875]: _type = "Task" [ 2949.680547] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2949.688278] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526697e0-7fcf-e5e8-1cd8-0123721617e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2949.980697] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180858, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2950.190935] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526697e0-7fcf-e5e8-1cd8-0123721617e7, 'name': SearchDatastore_Task, 'duration_secs': 0.011193} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2950.191324] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/2a14ed21-f1d3-4076-ba94-5570d5421316" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2950.191536] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/1ed404ac-e788-42c8-8f50-340af216d0d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2950.191653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/1ed404ac-e788-42c8-8f50-340af216d0d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2950.191982] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1ed404ac-e788-42c8-8f50-340af216d0d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2950.192257] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a37f4433-4f91-4cbd-b087-b1cc075c4d18 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2950.196295] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2950.196295] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52865f78-b318-6245-82c4-6579b2ed4373" [ 2950.196295] env[62875]: _type = "Task" [ 2950.196295] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2950.204188] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52865f78-b318-6245-82c4-6579b2ed4373, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2950.481428] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180858, 'name': RemoveSnapshot_Task, 'duration_secs': 0.563427} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2950.481691] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Deleted Snapshot of the VM instance {{(pid=62875) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}}
[ 2950.482106] env[62875]: DEBUG nova.compute.manager [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2950.482743] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a0910e-2a11-4107-b737-f899d91fa6a9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2950.707396] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52865f78-b318-6245-82c4-6579b2ed4373, 'name': SearchDatastore_Task, 'duration_secs': 0.013453} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2950.707728] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/1ed404ac-e788-42c8-8f50-340af216d0d5" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2950.707953] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/e13db1c7-797d-42c3-8fcc-b50337072a11" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2950.708091] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/e13db1c7-797d-42c3-8fcc-b50337072a11" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2950.708471] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e13db1c7-797d-42c3-8fcc-b50337072a11" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2950.708739] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e88374b-ecc8-4501-818d-92bf64d654e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2950.713305] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){
[ 2950.713305] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525390bc-31ad-6709-ef84-f57f4e3fb14d"
[ 2950.713305] env[62875]: _type = "Task"
[ 2950.713305] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
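Every vCenter operation in this log has the same shape: invoke the API, get a Task back, then poll it until it finishes; the "progress is 0%" / "33%" / "100%" lines and the final "completed successfully" lines all come from that poll loop. An illustrative version of the loop under stated assumptions; TaskInfo, get_task_info, and the half-second interval are placeholders, not oslo.vmware's real internals:

    import time
    from dataclasses import dataclass

    POLL_INTERVAL = 0.5  # assumed seconds between polls

    @dataclass
    class TaskInfo:
        state: str             # "running", "success", or "error"
        progress: int = 0
        result: object = None
        error: str = ""

    class TaskFailed(Exception):
        pass

    def wait_for_task(task, get_task_info):
        """Poll a task until it completes; get_task_info stands in for one
        read of the task's info property per iteration."""
        while True:
            info = get_task_info(task)
            if info.state == "success":
                return info.result           # "completed successfully"
            if info.state == "error":
                raise TaskFailed(info.error)
            # otherwise: "Task: {...} progress is N%."
            time.sleep(POLL_INTERVAL)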
[ 2950.720839] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525390bc-31ad-6709-ef84-f57f4e3fb14d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2950.994067] env[62875]: INFO nova.compute.manager [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Shelve offloading
[ 2951.223781] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]525390bc-31ad-6709-ef84-f57f4e3fb14d, 'name': SearchDatastore_Task, 'duration_secs': 0.012128} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2951.224163] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/e13db1c7-797d-42c3-8fcc-b50337072a11" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2951.224336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2951.224456] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2951.224801] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2951.225073] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e19c9e6-6869-4b5a-a0eb-f1b10e8e9685 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2951.229117] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){
[ 2951.229117] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d05e6b-e788-f458-9512-3b741ebda0da"
[ 2951.229117] env[62875]: _type = "Task"
[ 2951.229117] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2951.236008] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d05e6b-e788-f458-9512-3b741ebda0da, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2951.497541] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2951.497831] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13d9bd24-b64a-43e1-955f-09817f439ad9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2951.505875] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2951.505875] env[62875]: value = "task-2180859" [ 2951.505875] env[62875]: _type = "Task" [ 2951.505875] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2951.513699] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180859, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2951.739371] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d05e6b-e788-f458-9512-3b741ebda0da, 'name': SearchDatastore_Task, 'duration_secs': 0.009783} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2951.739727] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/1b9133c4-31d1-4883-aa5b-d6732541fc7e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2951.740034] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/093856ae-59e6-479c-a8fa-70f37d55d61d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2951.740175] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/093856ae-59e6-479c-a8fa-70f37d55d61d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2951.740500] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/093856ae-59e6-479c-a8fa-70f37d55d61d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2951.740770] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c3faeba-5a3d-47f1-b8bc-cc969be3b997 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2951.745525] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2951.745525] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527be063-137d-d68a-d2a9-ea80e34c5d24" [ 2951.745525] env[62875]: _type = "Task" [ 2951.745525] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2951.752897] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527be063-137d-d68a-d2a9-ea80e34c5d24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2952.016757] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] VM already powered off {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 2952.016918] env[62875]: DEBUG nova.compute.manager [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 2952.018080] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4a860e-f386-4f4d-978c-d5ad808bbac9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2952.024889] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2952.024889] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2952.024889] env[62875]: DEBUG nova.network.neutron [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 2952.257052] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527be063-137d-d68a-d2a9-ea80e34c5d24, 'name': SearchDatastore_Task, 'duration_secs': 0.013585} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
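The PowerOffVM_Task issued at 2951.497831 is answered here with "VM already powered off": the driver treats powering off an already-off instance as success, which keeps shelve offload idempotent across retries. One way to write that check is sketched below; the real driver may instead catch an invalid-power-state fault, and every helper here is a hypothetical stand-in for the driver's vim calls:

    POWERED_OFF = "poweredOff"  # vSphere runtime.powerState value

    def power_off_instance(vm_ref):
        if get_power_state(vm_ref) == POWERED_OFF:
            return                       # nothing to do: "VM already powered off"
        task = invoke_power_off(vm_ref)  # hypothetical: PowerOffVM_Task
        wait_for_completion(task)        # hypothetical: the poll loop sketched earlier

    def get_power_state(vm_ref):
        ...  # hypothetical: read runtime.powerState via the PropertyCollector

    def invoke_power_off(vm_ref):
        ...  # hypothetical: issue VirtualMachine.PowerOffVM_Task

    def wait_for_completion(task):
        ...  # hypothetical: poll the task until success or error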
[ 2952.257052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/093856ae-59e6-479c-a8fa-70f37d55d61d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2952.257052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/0b3f1c89-ed96-4133-b45c-1bf34e9d74e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2952.257052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/0b3f1c89-ed96-4133-b45c-1bf34e9d74e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2952.257551] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0b3f1c89-ed96-4133-b45c-1bf34e9d74e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2952.257551] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-715a1a5b-cf6b-4f23-975d-835b9250efce {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2952.261560] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){
[ 2952.261560] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52407fc0-d364-3eec-898f-4dd0cee7e328"
[ 2952.261560] env[62875]: _type = "Task"
[ 2952.261560] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2952.268544] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52407fc0-d364-3eec-898f-4dd0cee7e328, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2952.729185] env[62875]: DEBUG nova.network.neutron [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2952.772770] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52407fc0-d364-3eec-898f-4dd0cee7e328, 'name': SearchDatastore_Task, 'duration_secs': 0.010331} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2952.773109] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/0b3f1c89-ed96-4133-b45c-1bf34e9d74e2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2952.773334] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/f742880c-ba69-4098-9a0f-d968ebf048eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2952.773455] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/f742880c-ba69-4098-9a0f-d968ebf048eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2952.773760] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f742880c-ba69-4098-9a0f-d968ebf048eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2952.774025] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d27922b-b950-4197-9d3e-230c1912449d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.778112] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2952.778112] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e50668-950d-db26-3b1c-887e90543197" [ 2952.778112] env[62875]: _type = "Task" [ 2952.778112] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2952.785031] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e50668-950d-db26-3b1c-887e90543197, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2953.232626] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2953.288070] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e50668-950d-db26-3b1c-887e90543197, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2953.288425] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/f742880c-ba69-4098-9a0f-d968ebf048eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2953.288628] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/c9369dba-46b6-4d8d-9a6b-bdf0007512eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2953.288747] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/c9369dba-46b6-4d8d-9a6b-bdf0007512eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2953.289060] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9369dba-46b6-4d8d-9a6b-bdf0007512eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2953.289328] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9acd988-5fe1-42b2-a552-5bbd37cca6bd {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.295066] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2953.295066] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d6c996-98d7-3be9-0824-924a97aada58" [ 2953.295066] env[62875]: _type = "Task" [ 2953.295066] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2953.302799] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d6c996-98d7-3be9-0824-924a97aada58, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2953.441615] env[62875]: DEBUG nova.compute.manager [req-a99ac6fd-9795-4a03-9ad3-34178ae80f5d req-0590af9f-7fde-49c2-a736-df1d4efd4c57 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-vif-unplugged-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2953.441832] env[62875]: DEBUG oslo_concurrency.lockutils [req-a99ac6fd-9795-4a03-9ad3-34178ae80f5d req-0590af9f-7fde-49c2-a736-df1d4efd4c57 service nova] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2953.442053] env[62875]: DEBUG oslo_concurrency.lockutils [req-a99ac6fd-9795-4a03-9ad3-34178ae80f5d req-0590af9f-7fde-49c2-a736-df1d4efd4c57 service nova] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2953.442225] env[62875]: DEBUG oslo_concurrency.lockutils [req-a99ac6fd-9795-4a03-9ad3-34178ae80f5d req-0590af9f-7fde-49c2-a736-df1d4efd4c57 service nova] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2953.442389] env[62875]: DEBUG nova.compute.manager [req-a99ac6fd-9795-4a03-9ad3-34178ae80f5d req-0590af9f-7fde-49c2-a736-df1d4efd4c57 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] No waiting events found dispatching network-vif-unplugged-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 2953.442554] env[62875]: WARNING nova.compute.manager [req-a99ac6fd-9795-4a03-9ad3-34178ae80f5d req-0590af9f-7fde-49c2-a736-df1d4efd4c57 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received unexpected event network-vif-unplugged-f4db9102-d2d3-403c-bfd9-37d1942d463b for instance with vm_state shelved and task_state shelving_offloading.
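Neutron reports the port unplug back to nova as an external instance event. The manager takes the per-instance "-events" lock, pops any waiter registered for (instance, event_name), and logs the WARNING above when nobody was waiting, which is expected here because shelve offload does not block on the unplug. A minimal sketch of that pop-or-warn dispatch, with a plain dict and threading primitives standing in for nova's InstanceEvents bookkeeping:

    import logging
    import threading

    LOG = logging.getLogger(__name__)

    class InstanceEvents:
        # Toy registry: (instance_uuid, event_name) -> threading.Event.

        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._waiters = {}

        def register(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                # "No waiting events found dispatching ..." plus the WARNING.
                LOG.warning("Received unexpected event %s for instance %s",
                            event_name, instance_uuid)
                return
            waiter.set()                    # wake whoever was waiting on the event

    # Dispatch with no registered waiter only logs the warning:
    InstanceEvents().dispatch(
        "00e65eb9-1db1-4456-9603-8b4cbff8ffe8",
        "network-vif-unplugged-f4db9102-d2d3-403c-bfd9-37d1942d463b")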
[ 2953.540631] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2953.541571] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a493c4-78c5-4545-b824-887136162924 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.549310] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2953.549520] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af328df9-551b-4bcc-ad26-7c2fd728ea1d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.805374] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d6c996-98d7-3be9-0824-924a97aada58, 'name': SearchDatastore_Task, 'duration_secs': 0.009117} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2953.805704] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/c9369dba-46b6-4d8d-9a6b-bdf0007512eb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2953.805930] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2953.806069] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2953.806396] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2953.806647] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46207375-4082-49f0-a984-335ac4a89cf7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.810995] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2953.810995] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c10801-5bc4-3017-f110-83537d5fc191" [ 2953.810995] env[62875]: _type = "Task" [ 2953.810995] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2953.817902] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c10801-5bc4-3017-f110-83537d5fc191, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2954.326099] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c10801-5bc4-3017-f110-83537d5fc191, 'name': SearchDatastore_Task, 'duration_secs': 0.009673} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2954.326578] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/6e470760-a9cb-40f3-b61d-bde9da6906e1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2954.326929] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2954.327156] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2954.327607] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2954.327987] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9db39e2f-3fa6-4618-90a3-9e8828d1a3c4 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.333613] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2954.333613] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520c57e3-962b-3bc1-0f6e-be59a9e8042a" [ 2954.333613] env[62875]: _type = "Task" [ 2954.333613] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2954.344495] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520c57e3-962b-3bc1-0f6e-be59a9e8042a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2954.844369] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520c57e3-962b-3bc1-0f6e-be59a9e8042a, 'name': SearchDatastore_Task, 'duration_secs': 0.008718} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2954.844654] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore2] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229/ts-2025-01-25-05-30-19 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2954.844917] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67b15585-41c1-4054-a786-46d653baba06 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2954.857309] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore2] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229/ts-2025-01-25-05-30-19 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2954.857471] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image ca67da56-91ff-4a3f-8660-2df3b9b08229 is no longer used by this node. Pending deletion!
[ 2954.857613] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/ca67da56-91ff-4a3f-8660-2df3b9b08229" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2954.857848] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/023ea807-eb40-4c08-a16a-aa94bee554ef" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2954.857986] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/023ea807-eb40-4c08-a16a-aa94bee554ef" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2954.858391] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/023ea807-eb40-4c08-a16a-aa94bee554ef" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2954.858748] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7092996f-5a19-4eb9-a724-28310ba07d8e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2954.863488] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){
[ 2954.863488] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5231145c-bb2a-ea57-3950-ea16d841bba8"
[ 2954.863488] env[62875]: _type = "Task"
[ 2954.863488] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2954.871304] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5231145c-bb2a-ea57-3950-ea16d841bba8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
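Two different cache outcomes appear in this sweep. An image that was just found unused gets a ts-<timestamp> marker directory and is logged as "Pending deletion!" (ca67da56 above), while an image whose marker was laid down on an earlier pass is deleted outright ("is no longer used. Deleting!", as with d008019d earlier). A sketch of that two-pass aging policy; the marker format matches ts-2025-01-25-05-30-19 above, but the 24-hour threshold and all helpers are assumptions, not nova's actual imagecache code:

    from datetime import datetime, timedelta

    TS_FORMAT = "ts-%Y-%m-%d-%H-%M-%S"   # matches ts-2025-01-25-05-30-19
    MAX_AGE = timedelta(hours=24)         # hypothetical aging threshold

    def age_cached_image(image_dir, now=None):
        """Mark an unused image on the first pass; delete it on a later pass
        once its marker has aged past MAX_AGE. Helpers are hypothetical."""
        now = now or datetime.utcnow()
        marker = find_timestamp_dir(image_dir)  # e.g. "ts-2025-01-25-05-30-19"
        if marker is None:
            # First pass: "... is no longer used by this node. Pending deletion!"
            make_dir(f"{image_dir}/{now.strftime(TS_FORMAT)}")
            return
        if now - datetime.strptime(marker, TS_FORMAT) >= MAX_AGE:
            # Later pass: "... is no longer used. Deleting!"
            delete_dir(image_dir)

    def find_timestamp_dir(image_dir):
        ...  # hypothetical: SearchDatastore_Task for a ts-* entry

    def make_dir(path):
        ...  # hypothetical: FileManager.MakeDirectory

    def delete_dir(path):
        ...  # hypothetical: FileManager.DeleteDatastoreFile_Task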
[ 2955.374971] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5231145c-bb2a-ea57-3950-ea16d841bba8, 'name': SearchDatastore_Task, 'duration_secs': 0.009825} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2955.375351] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/023ea807-eb40-4c08-a16a-aa94bee554ef" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2955.375580] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/920a2d3a-e4b7-4ccf-88f8-fae96670e5ac" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2955.375778] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/920a2d3a-e4b7-4ccf-88f8-fae96670e5ac" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2955.376046] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/920a2d3a-e4b7-4ccf-88f8-fae96670e5ac" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2955.376335] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35aa398e-a71b-4b38-a6cc-56b5a9d55197 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2955.381166] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){
[ 2955.381166] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527e9dbc-b293-c5fc-6537-dcf0411187c3"
[ 2955.381166] env[62875]: _type = "Task"
[ 2955.381166] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2955.390321] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527e9dbc-b293-c5fc-6537-dcf0411187c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2955.467700] env[62875]: DEBUG nova.compute.manager [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 2955.468077] env[62875]: DEBUG nova.compute.manager [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing instance network info cache due to event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b.
{{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2955.468225] env[62875]: DEBUG oslo_concurrency.lockutils [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2955.468389] env[62875]: DEBUG oslo_concurrency.lockutils [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2955.468550] env[62875]: DEBUG nova.network.neutron [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2955.891686] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]527e9dbc-b293-c5fc-6537-dcf0411187c3, 'name': SearchDatastore_Task, 'duration_secs': 0.010584} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2955.891977] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/920a2d3a-e4b7-4ccf-88f8-fae96670e5ac is no longer used. Deleting! [ 2955.892134] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/920a2d3a-e4b7-4ccf-88f8-fae96670e5ac {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2955.892378] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05e5e39a-32e8-4adc-9d2e-a578cc0d6840 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2955.898185] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2955.898185] env[62875]: value = "task-2180861" [ 2955.898185] env[62875]: _type = "Task" [ 2955.898185] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2955.905476] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180861, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2956.173420] env[62875]: DEBUG nova.network.neutron [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updated VIF entry in instance network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b. 
{{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 2956.173778] env[62875]: DEBUG nova.network.neutron [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf4db9102-d2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
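Compare this refreshed entry with the one cached before the unplug at 2952.729185: the port keeps its identity and addressing (id, MAC, fixed and floating IPs), but the binding is gone: type flips from "ovs" to "unbound", bridge and ovs_interfaceid become null, details empties out, and active goes false, so the instance can be unshelved and rewired later. A small sketch of that rewrite, treating the cached entry as the plain dict shown in the log (nova's real model is a VIF object, not a dict):

    def mark_vif_unbound(vif):
        """Strip the binding from a cached VIF entry after its port is
        unplugged; field names follow the logged network_info JSON."""
        vif["type"] = "unbound"          # was "ovs"
        vif["details"] = {}              # binding details no longer apply
        vif["ovs_interfaceid"] = None    # was the OVS port id
        vif["network"]["bridge"] = None  # was "br-int"
        vif["active"] = False            # port is no longer wired up
        # id, address, subnets and floating IPs are left intact so the
        # instance can come back with the same networking on unshelve.
        return vif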
[ 2956.408069] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10583} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2956.408430] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2956.408430] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/920a2d3a-e4b7-4ccf-88f8-fae96670e5ac" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2956.408621] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/d638b542-6030-4b6c-8944-e7ddbb362cf0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2956.408736] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/d638b542-6030-4b6c-8944-e7ddbb362cf0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2956.409079] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d638b542-6030-4b6c-8944-e7ddbb362cf0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2956.409337] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81441137-5bcb-49f4-a0be-8b714742d8a5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2956.413847] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){
[ 2956.413847] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb175a-0ae8-70b3-7056-c408fd83df6f"
[ 2956.413847] env[62875]: _type = "Task"
[ 2956.413847] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2956.420918] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb175a-0ae8-70b3-7056-c408fd83df6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2956.677211] env[62875]: DEBUG oslo_concurrency.lockutils [req-6861cd80-6179-4df3-9d6b-29b037470b5a req-44a4fe6b-77c4-40f9-8312-1fadea83b231 service nova] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2956.924274] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb175a-0ae8-70b3-7056-c408fd83df6f, 'name': SearchDatastore_Task, 'duration_secs': 0.009907} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2956.924585] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/d638b542-6030-4b6c-8944-e7ddbb362cf0 is no longer used. Deleting!
[ 2956.924728] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/d638b542-6030-4b6c-8944-e7ddbb362cf0 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2956.924989] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06635820-e43e-4486-b396-12a5e4a73e8a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2956.931190] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){
[ 2956.931190] env[62875]: value = "task-2180862"
[ 2956.931190] env[62875]: _type = "Task"
[ 2956.931190] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2956.938116] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180862, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2957.440579] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180862, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103717} completed successfully.
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2957.441047] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2957.441047] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/d638b542-6030-4b6c-8944-e7ddbb362cf0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2957.441182] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/b4bf605e-af7a-4bf4-b6de-b023e9ded5d8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2957.441303] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/b4bf605e-af7a-4bf4-b6de-b023e9ded5d8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2957.441620] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b4bf605e-af7a-4bf4-b6de-b023e9ded5d8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2957.441876] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48a2d36a-bcd3-41c8-88fe-92833ca0f418 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.445939] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2957.445939] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fa96dd-cddb-214b-4ed9-1301655a2d3b" [ 2957.445939] env[62875]: _type = "Task" [ 2957.445939] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2957.452832] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fa96dd-cddb-214b-4ed9-1301655a2d3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2957.956731] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fa96dd-cddb-214b-4ed9-1301655a2d3b, 'name': SearchDatastore_Task, 'duration_secs': 0.009633} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2957.957079] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/b4bf605e-af7a-4bf4-b6de-b023e9ded5d8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2957.957308] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2957.957430] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2957.957753] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2957.958059] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77bc155d-f314-454b-943a-6725bbbbfbe5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.962773] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2957.962773] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5283fabf-3ab3-396a-e7be-683ce4f5cc3e" [ 2957.962773] env[62875]: _type = "Task" [ 2957.962773] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2957.969807] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5283fabf-3ab3-396a-e7be-683ce4f5cc3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2958.474708] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5283fabf-3ab3-396a-e7be-683ce4f5cc3e, 'name': SearchDatastore_Task, 'duration_secs': 0.009514} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2958.475042] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/5e2947cb-6966-4d47-ac04-4bd4dc218aec" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2958.475262] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2958.475378] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2958.475740] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2958.476012] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3283b2c5-83a7-4e18-9267-97239bdf73be {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2958.480029] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2958.480029] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52df293c-2c5c-38af-5ae1-f632199c13e5" [ 2958.480029] env[62875]: _type = "Task" [ 2958.480029] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2958.487302] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52df293c-2c5c-38af-5ae1-f632199c13e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2958.990510] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52df293c-2c5c-38af-5ae1-f632199c13e5, 'name': SearchDatastore_Task, 'duration_secs': 0.010826} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2958.990815] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17 is no longer used. Deleting! 
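
(The delete cycle recorded above and below — a SearchDatastore_Task to confirm the cached image is unused, then a FileManager.DeleteDatastoreFile_Task polled until completion — is driven through oslo.vmware's session helper. A minimal, hypothetical sketch of the same call pattern follows; the endpoint, credentials, datacenter reference and image path are placeholders, not values from this run, and this is not Nova's own imagecache code.)

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials -- not the ones in this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    datacenter_moref = None  # in real code: a Datacenter managed-object ref

    # DeleteDatastoreFile_Task returns a Task moref; wait_for_task() polls it
    # (the "progress is 0%" / "completed successfully" lines in this log)
    # and raises if the task ends in error.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] devstack-image-cache_base/<image-uuid>',
        datacenter=datacenter_moref)
    session.wait_for_task(task)

(Each such cycle is additionally bracketed by a per-image lock, which is why every deletion above sits between "Acquiring"/"Releasing" lockutils lines.)
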
[ 2958.990967] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2958.991241] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9be62539-1020-481b-9bf0-7db21b359a7f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2958.997767] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2958.997767] env[62875]: value = "task-2180863" [ 2958.997767] env[62875]: _type = "Task" [ 2958.997767] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2959.004961] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2959.508111] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092596} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2959.508436] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2959.508487] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/b9994bb0-44f6-42ee-9ca4-51f1ca7e9b17" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2959.508707] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2959.508823] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2959.509094] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2959.509344] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57834c79-0c14-4205-b2d6-6828e7f61003 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.513451] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2959.513451] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a2ee2d-9657-115d-84ec-5c0c5c797aa9" [ 2959.513451] env[62875]: _type = "Task" [ 2959.513451] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2959.520439] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a2ee2d-9657-115d-84ec-5c0c5c797aa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2959.653093] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2959.653304] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2959.653490] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleting the datastore file [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2959.653763] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82fc223a-77c9-4d06-a07e-bc2068c70904 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.659777] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2959.659777] env[62875]: value = "task-2180864" [ 2959.659777] env[62875]: _type = "Task" [ 2959.659777] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2959.666935] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2960.023936] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a2ee2d-9657-115d-84ec-5c0c5c797aa9, 'name': SearchDatastore_Task, 'duration_secs': 0.008305} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2960.024179] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/ts-2025-01-25-05-30-24 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2960.024436] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d6105c2-07ba-4c4a-9458-239480eba11a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2960.035860] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36/ts-2025-01-25-05-30-24 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2960.035996] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image c28dfddf-e555-419c-86a5-ca1247af5c36 is no longer used by this node. Pending deletion! [ 2960.036171] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/c28dfddf-e555-419c-86a5-ca1247af5c36" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2960.036380] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/3e246b4b-5f14-409c-8b8d-486d2ad583a8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2960.036497] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/3e246b4b-5f14-409c-8b8d-486d2ad583a8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2960.036816] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3e246b4b-5f14-409c-8b8d-486d2ad583a8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2960.037051] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98475e19-df21-4caa-aab2-c50a2dadc5c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2960.041168] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2960.041168] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520cf25c-d945-aec6-727b-effa49e1cc6b" [ 2960.041168] env[62875]: _type = "Task" [ 2960.041168] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2960.048595] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520cf25c-d945-aec6-727b-effa49e1cc6b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2960.171634] env[62875]: DEBUG oslo_vmware.api [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148006} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2960.172024] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2960.172329] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2960.172611] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2960.198964] env[62875]: INFO nova.scheduler.client.report [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted allocations for instance 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 [ 2960.551580] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]520cf25c-d945-aec6-727b-effa49e1cc6b, 'name': SearchDatastore_Task, 'duration_secs': 0.008609} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2960.551991] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/3e246b4b-5f14-409c-8b8d-486d2ad583a8 is no longer used. Deleting! [ 2960.552067] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/3e246b4b-5f14-409c-8b8d-486d2ad583a8 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2960.552275] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fa56735-016e-4be4-a323-ed676b8aaa26 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2960.559038] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2960.559038] env[62875]: value = "task-2180865" [ 2960.559038] env[62875]: _type = "Task" [ 2960.559038] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2960.566289] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180865, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2960.704778] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2960.705040] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2960.705283] env[62875]: DEBUG nova.objects.instance [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'resources' on Instance uuid 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2961.069383] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180865, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103702} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2961.069603] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2961.069774] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/3e246b4b-5f14-409c-8b8d-486d2ad583a8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2961.070023] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/1e2c1464-86be-41fc-9dfe-a5da98f5f899" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2961.070149] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/1e2c1464-86be-41fc-9dfe-a5da98f5f899" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2961.070460] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1e2c1464-86be-41fc-9dfe-a5da98f5f899" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2961.070717] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f78333c2-9fd8-4869-aab0-08740bfcd638 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.074929] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2961.074929] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a2177e-5cf1-cd7d-0f40-cc8026888320" [ 2961.074929] env[62875]: _type = "Task" [ 2961.074929] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2961.082512] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a2177e-5cf1-cd7d-0f40-cc8026888320, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2961.208451] env[62875]: DEBUG nova.objects.instance [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'numa_topology' on Instance uuid 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2961.585302] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52a2177e-5cf1-cd7d-0f40-cc8026888320, 'name': SearchDatastore_Task, 'duration_secs': 0.009673} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2961.585680] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/1e2c1464-86be-41fc-9dfe-a5da98f5f899" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2961.585850] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/a2493497-b27a-404a-b841-e58cbfd86ac3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2961.585970] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/a2493497-b27a-404a-b841-e58cbfd86ac3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2961.586303] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a2493497-b27a-404a-b841-e58cbfd86ac3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2961.586561] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f84fba5f-bf25-4a18-8b33-4f4a18c53c3c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.590899] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2961.590899] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521cca65-db49-3575-ade5-b2ffb95e3715" [ 2961.590899] env[62875]: _type = "Task" [ 2961.590899] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2961.598637] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521cca65-db49-3575-ade5-b2ffb95e3715, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2961.711376] env[62875]: DEBUG nova.objects.base [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Object Instance<00e65eb9-1db1-4456-9603-8b4cbff8ffe8> lazy-loaded attributes: resources,numa_topology {{(pid=62875) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2961.727982] env[62875]: DEBUG nova.scheduler.client.report [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2961.740829] env[62875]: DEBUG nova.scheduler.client.report [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2961.741241] env[62875]: DEBUG nova.compute.provider_tree [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2961.751791] env[62875]: DEBUG nova.scheduler.client.report [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2961.768474] env[62875]: DEBUG nova.scheduler.client.report [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2961.779909] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d2a14f-6256-433f-a438-d1c5d9256aa6 {{(pid=62875) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.787203] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00864e63-a5ee-47b8-9489-3c8a34816a14 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.817353] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926234ba-5d37-4403-a896-25021117a957 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.824307] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f0d112-cf7e-45b0-bcf7-7db716a62afb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.837532] env[62875]: DEBUG nova.compute.provider_tree [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2962.101640] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521cca65-db49-3575-ade5-b2ffb95e3715, 'name': SearchDatastore_Task, 'duration_secs': 0.009479} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2962.101960] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/a2493497-b27a-404a-b841-e58cbfd86ac3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2962.102211] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/2443d662-1d0d-449d-88b6-e47ea2999008" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2962.102336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/2443d662-1d0d-449d-88b6-e47ea2999008" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2962.102652] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2443d662-1d0d-449d-88b6-e47ea2999008" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2962.102916] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-543f2478-a9f6-4179-9722-eb4d6a6737b2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.107894] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2962.107894] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c859bd-967d-aaac-f8e6-83650ed4b319" [ 2962.107894] env[62875]: _type = "Task" [ 2962.107894] env[62875]: } 
to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2962.115417] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c859bd-967d-aaac-f8e6-83650ed4b319, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2962.340985] env[62875]: DEBUG nova.scheduler.client.report [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2962.618476] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52c859bd-967d-aaac-f8e6-83650ed4b319, 'name': SearchDatastore_Task, 'duration_secs': 0.009506} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2962.618860] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/2443d662-1d0d-449d-88b6-e47ea2999008" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2962.619110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/ea0c8a7f-645a-4c18-9776-73cd5e4adbf3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2962.619234] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/ea0c8a7f-645a-4c18-9776-73cd5e4adbf3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2962.619558] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ea0c8a7f-645a-4c18-9776-73cd5e4adbf3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2962.619821] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eb89f63-370b-4e69-8de9-2cbcabb88b5c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.624135] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2962.624135] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52db233b-ddb1-3e78-1e10-a5a09119c2f8" [ 2962.624135] env[62875]: _type = "Task" [ 2962.624135] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2962.631541] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52db233b-ddb1-3e78-1e10-a5a09119c2f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2962.846320] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2963.024667] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2963.134214] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52db233b-ddb1-3e78-1e10-a5a09119c2f8, 'name': SearchDatastore_Task, 'duration_secs': 0.010371} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2963.134543] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/ea0c8a7f-645a-4c18-9776-73cd5e4adbf3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2963.134769] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/f2d82461-b7f3-4734-ae09-8a00190a1173" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2963.134888] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/f2d82461-b7f3-4734-ae09-8a00190a1173" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2963.135224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2d82461-b7f3-4734-ae09-8a00190a1173" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2963.135486] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d418d325-7084-40ef-bb71-aab51857ff9c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.139884] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2963.139884] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ab5b87-1276-fa42-fc0c-ea3049bda7b7" [ 2963.139884] 
env[62875]: _type = "Task" [ 2963.139884] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2963.147291] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ab5b87-1276-fa42-fc0c-ea3049bda7b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2963.354095] env[62875]: DEBUG oslo_concurrency.lockutils [None req-8b3ab40d-0210-43e2-8408-62b7bf012c57 tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.550s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2963.354871] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.330s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2963.355058] env[62875]: INFO nova.compute.manager [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Unshelving [ 2963.650502] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ab5b87-1276-fa42-fc0c-ea3049bda7b7, 'name': SearchDatastore_Task, 'duration_secs': 0.009917} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2963.650844] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/f2d82461-b7f3-4734-ae09-8a00190a1173" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2963.651016] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/98e60bc0-9bc6-4d07-8a92-e74bcc7c1441" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2963.651148] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/98e60bc0-9bc6-4d07-8a92-e74bcc7c1441" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2963.651467] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/98e60bc0-9bc6-4d07-8a92-e74bcc7c1441" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2963.651731] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd4b75d0-95af-428b-929e-91ffe3a4cd15 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.655937] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2963.655937] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f81fe-9f40-d5c6-626e-aff6b7132e2f" [ 2963.655937] env[62875]: _type = "Task" [ 2963.655937] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2963.663116] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f81fe-9f40-d5c6-626e-aff6b7132e2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2964.166531] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]521f81fe-9f40-d5c6-626e-aff6b7132e2f, 'name': SearchDatastore_Task, 'duration_secs': 0.009876} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2964.166883] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/98e60bc0-9bc6-4d07-8a92-e74bcc7c1441" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2964.167127] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/14e75ded-0f0e-4bbd-919a-0df16ff69f4b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2964.167253] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/14e75ded-0f0e-4bbd-919a-0df16ff69f4b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2964.167573] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/14e75ded-0f0e-4bbd-919a-0df16ff69f4b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2964.167861] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c853747e-e70d-47b8-b4e5-69387d10e4c3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.172268] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2964.172268] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52149f90-688e-3f65-4440-1b8c36b4485b" [ 2964.172268] env[62875]: _type = "Task" [ 2964.172268] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2964.180631] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52149f90-688e-3f65-4440-1b8c36b4485b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2964.375027] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2964.375336] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2964.375550] env[62875]: DEBUG nova.objects.instance [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'pci_requests' on Instance uuid 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2964.682867] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52149f90-688e-3f65-4440-1b8c36b4485b, 'name': SearchDatastore_Task, 'duration_secs': 0.01092} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2964.683207] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/14e75ded-0f0e-4bbd-919a-0df16ff69f4b is no longer used. Deleting! [ 2964.683332] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/14e75ded-0f0e-4bbd-919a-0df16ff69f4b {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2964.683583] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3efdcf69-b62a-4e3b-98fe-0af9d99e1d6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.690056] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2964.690056] env[62875]: value = "task-2180866" [ 2964.690056] env[62875]: _type = "Task" [ 2964.690056] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2964.697335] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180866, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2964.879665] env[62875]: DEBUG nova.objects.instance [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'numa_topology' on Instance uuid 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2965.200437] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109145} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2965.200588] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2965.200826] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/14e75ded-0f0e-4bbd-919a-0df16ff69f4b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2965.201083] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/9b4c8168-e0fe-4629-bf57-53d4dfa21dbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2965.201203] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/9b4c8168-e0fe-4629-bf57-53d4dfa21dbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2965.201526] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/9b4c8168-e0fe-4629-bf57-53d4dfa21dbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2965.201785] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33fd480d-a8f1-4e42-adae-ee332acfccff {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.207045] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2965.207045] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ca92e5-1fc5-eba4-c01c-28874499a42f" [ 2965.207045] env[62875]: _type = "Task" [ 2965.207045] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2965.213684] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ca92e5-1fc5-eba4-c01c-28874499a42f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2965.382596] env[62875]: INFO nova.compute.claims [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2965.716332] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ca92e5-1fc5-eba4-c01c-28874499a42f, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2965.716716] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/9b4c8168-e0fe-4629-bf57-53d4dfa21dbb is no longer used. Deleting! [ 2965.716764] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/9b4c8168-e0fe-4629-bf57-53d4dfa21dbb {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2965.717019] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5368bab-843d-4823-808b-f1ba059fe6e0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.723404] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2965.723404] env[62875]: value = "task-2180867" [ 2965.723404] env[62875]: _type = "Task" [ 2965.723404] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2965.730488] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180867, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2966.233754] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105761} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2966.233992] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2966.234186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/9b4c8168-e0fe-4629-bf57-53d4dfa21dbb" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2966.234409] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/f6162228-9f0c-4f73-aeac-25612f8b9b3b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2966.234528] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/f6162228-9f0c-4f73-aeac-25612f8b9b3b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2966.234863] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6162228-9f0c-4f73-aeac-25612f8b9b3b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2966.235138] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3aee27d-b2e1-49e8-a808-93710015c610 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.239310] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2966.239310] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5289b1b6-4431-a07c-1cf8-3fc786037003" [ 2966.239310] env[62875]: _type = "Task" [ 2966.239310] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2966.246560] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5289b1b6-4431-a07c-1cf8-3fc786037003, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2966.414025] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef83a938-35ec-4355-bb39-d044b06d95fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.421208] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e0f7c9-6ae7-40a1-a7cb-df0a4ba56827 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.451940] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9e347b-a206-49b9-b103-0e7e784003a9 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.458792] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761dc6b4-c873-4626-8697-ac7e4201b6a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.471618] env[62875]: DEBUG nova.compute.provider_tree [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2966.749222] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5289b1b6-4431-a07c-1cf8-3fc786037003, 'name': SearchDatastore_Task, 'duration_secs': 0.009751} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2966.749602] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/f6162228-9f0c-4f73-aeac-25612f8b9b3b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2966.749770] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/69ff1dff-fe96-44af-83ce-b64fe4278d3e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2966.749891] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/69ff1dff-fe96-44af-83ce-b64fe4278d3e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2966.750261] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/69ff1dff-fe96-44af-83ce-b64fe4278d3e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2966.750519] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3fc2ef9-b59c-456c-815f-a95d3183487d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.754934] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2966.754934] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b03712-1baf-90fb-3729-6426211fdf45" [ 2966.754934] env[62875]: _type = "Task" [ 2966.754934] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2966.762297] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b03712-1baf-90fb-3729-6426211fdf45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2966.991278] env[62875]: ERROR nova.scheduler.client.report [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [req-0bf11e24-7d1d-4d71-8193-20d4fe8473be] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2d6e5fad-ed55-4f17-b68d-be9dae183a02. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0bf11e24-7d1d-4d71-8193-20d4fe8473be"}]} [ 2967.006731] env[62875]: DEBUG nova.scheduler.client.report [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Refreshing inventories for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2967.019088] env[62875]: DEBUG nova.scheduler.client.report [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating ProviderTree inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2967.019313] env[62875]: DEBUG nova.compute.provider_tree [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2967.029192] env[62875]: DEBUG nova.scheduler.client.report [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Refreshing aggregate associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, aggregates: None {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2967.045106] env[62875]: DEBUG nova.scheduler.client.report [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Refreshing trait associations for resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62875) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2967.065660] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d51558-1ce6-4277-ae7a-83aa0f5f1951 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.072948] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6b04f123-0471-4f05-8e89-b61f63373d8c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.103232] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c252b7-2922-4b4f-bc6d-8c77caac0df8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.110014] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e646150-063e-4522-b4b7-a644b914963a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.122685] env[62875]: DEBUG nova.compute.provider_tree [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2967.264849] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b03712-1baf-90fb-3729-6426211fdf45, 'name': SearchDatastore_Task, 'duration_secs': 0.013059} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2967.265164] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/69ff1dff-fe96-44af-83ce-b64fe4278d3e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2967.265389] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/66e59ec1-250c-45cc-bae0-90ef142b17d3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2967.265510] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/66e59ec1-250c-45cc-bae0-90ef142b17d3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2967.265828] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/66e59ec1-250c-45cc-bae0-90ef142b17d3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2967.266095] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56a067ff-8607-446d-8228-0cf0004e8d8d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.270629] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2967.270629] 
env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d49552-29d0-a7f1-6a9a-355f4a3b16df" [ 2967.270629] env[62875]: _type = "Task" [ 2967.270629] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.277900] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d49552-29d0-a7f1-6a9a-355f4a3b16df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.653621] env[62875]: DEBUG nova.scheduler.client.report [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updated inventory for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2967.653915] env[62875]: DEBUG nova.compute.provider_tree [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating resource provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 generation from 135 to 136 during operation: update_inventory {{(pid=62875) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2967.654111] env[62875]: DEBUG nova.compute.provider_tree [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Updating inventory in ProviderTree for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2967.780499] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d49552-29d0-a7f1-6a9a-355f4a3b16df, 'name': SearchDatastore_Task, 'duration_secs': 0.010008} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2967.780860] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/66e59ec1-250c-45cc-bae0-90ef142b17d3 is no longer used. Deleting! 
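The ERROR at req-0bf11e24 above, followed by the inventory refresh and the provider generation advancing from 135 to 136, is Placement's optimistic concurrency control at work: every inventory write carries the resource provider generation the caller last saw, a stale generation is rejected with 409 placement.concurrent_update, and the caller re-reads the provider and retries. A minimal sketch of that read-retry loop, assuming a keystoneauth-style `session` already pointed at the Placement endpoint (the helper name and retry count are illustrative choices, not Nova's actual report client):

    def set_inventory_with_retry(session, rp_uuid, inventories, max_retries=4):
        # Illustrative sketch only -- not Nova's report client.
        url = "/resource_providers/%s/inventories" % rp_uuid
        for _ in range(max_retries):
            # Re-read the provider to learn its current generation.
            current = session.get(url).json()
            payload = {
                "resource_provider_generation":
                    current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = session.put(url, json=payload)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation between our GET and PUT; loop and re-read.
        raise RuntimeError("placement generation conflict persisted")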
[ 2967.781027] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/66e59ec1-250c-45cc-bae0-90ef142b17d3 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2967.781256] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69969903-62d7-4b92-90bc-2903946b5fa5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.787849] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2967.787849] env[62875]: value = "task-2180868" [ 2967.787849] env[62875]: _type = "Task" [ 2967.787849] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.795262] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180868, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2968.159469] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.784s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2968.188277] env[62875]: INFO nova.network.neutron [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating port f4db9102-d2d3-403c-bfd9-37d1942d463b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2968.297521] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180868, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105187} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2968.297756] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2968.297941] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/66e59ec1-250c-45cc-bae0-90ef142b17d3" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2968.298178] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/8e66a304-9137-431c-b29c-0fcfcd209e45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2968.298299] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/8e66a304-9137-431c-b29c-0fcfcd209e45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2968.298649] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8e66a304-9137-431c-b29c-0fcfcd209e45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2968.298947] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82d757bb-868e-4939-8693-5890101d3d6f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.303390] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2968.303390] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5216c42d-850e-0c21-a3b6-da70eb0feb8c" [ 2968.303390] env[62875]: _type = "Task" [ 2968.303390] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2968.310877] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5216c42d-850e-0c21-a3b6-da70eb0feb8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2968.813648] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5216c42d-850e-0c21-a3b6-da70eb0feb8c, 'name': SearchDatastore_Task, 'duration_secs': 0.008964} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2968.813997] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore2] devstack-image-cache_base/8e66a304-9137-431c-b29c-0fcfcd209e45/ts-2025-01-25-05-30-33 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2968.814195] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-052400de-acdb-40cb-9bdc-6e162c33f96a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.826212] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore2] devstack-image-cache_base/8e66a304-9137-431c-b29c-0fcfcd209e45/ts-2025-01-25-05-30-33 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2968.826358] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image 8e66a304-9137-431c-b29c-0fcfcd209e45 is no longer used by this node. Pending deletion! [ 2968.826508] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/8e66a304-9137-431c-b29c-0fcfcd209e45" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2968.826722] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/edd11f48-ec90-49d5-ac00-dfc23e2f0432" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2968.826837] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/edd11f48-ec90-49d5-ac00-dfc23e2f0432" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2968.827186] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/edd11f48-ec90-49d5-ac00-dfc23e2f0432" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2968.827418] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-476c6f99-78f8-4b42-878f-b4330b966284 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.831266] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2968.831266] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529b5094-4daa-1268-8096-4baff856fdb0" [ 2968.831266] env[62875]: _type = "Task" [ 2968.831266] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2968.838470] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529b5094-4daa-1268-8096-4baff856fdb0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2969.341685] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]529b5094-4daa-1268-8096-4baff856fdb0, 'name': SearchDatastore_Task, 'duration_secs': 0.00939} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2969.341982] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/edd11f48-ec90-49d5-ac00-dfc23e2f0432 is no longer used. Deleting! [ 2969.342142] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/edd11f48-ec90-49d5-ac00-dfc23e2f0432 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2969.342403] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12e2d3a9-ad88-4220-a26a-51ea6e3d66fa {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.348109] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2969.348109] env[62875]: value = "task-2180869" [ 2969.348109] env[62875]: _type = "Task" [ 2969.348109] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2969.356328] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2969.857250] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106963} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2969.857594] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2969.857634] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/edd11f48-ec90-49d5-ac00-dfc23e2f0432" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2969.857841] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/9611fe96-37d4-4014-99b1-ea971bfb4ca7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2969.857957] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/9611fe96-37d4-4014-99b1-ea971bfb4ca7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2969.858326] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/9611fe96-37d4-4014-99b1-ea971bfb4ca7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2969.858585] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9088607a-d996-48fa-9290-02f47f853bbe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.862797] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2969.862797] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf3e2-4443-bf5e-0d29-9a90c7dfc136" [ 2969.862797] env[62875]: _type = "Task" [ 2969.862797] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2969.870472] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf3e2-4443-bf5e-0d29-9a90c7dfc136, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2970.373657] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]524cf3e2-4443-bf5e-0d29-9a90c7dfc136, 'name': SearchDatastore_Task, 'duration_secs': 0.009505} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2970.373983] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/9611fe96-37d4-4014-99b1-ea971bfb4ca7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2970.374224] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/59fe6534-7b64-4b33-b9a7-4a2c42980483" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2970.374345] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/59fe6534-7b64-4b33-b9a7-4a2c42980483" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2970.374698] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/59fe6534-7b64-4b33-b9a7-4a2c42980483" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2970.374974] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6c8bc68-54b9-4a97-bff7-05d13ddb9bde {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2970.379072] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2970.379072] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e5c40b-cdaf-eaa3-db11-6f62330cf4cc" [ 2970.379072] env[62875]: _type = "Task" [ 2970.379072] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2970.386160] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e5c40b-cdaf-eaa3-db11-6f62330cf4cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2970.892256] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52e5c40b-cdaf-eaa3-db11-6f62330cf4cc, 'name': SearchDatastore_Task, 'duration_secs': 0.009248} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2970.892564] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/59fe6534-7b64-4b33-b9a7-4a2c42980483" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2970.892842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/5e3cf68c-5add-4cc6-b9cd-717ba1fe77cf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2970.892969] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/5e3cf68c-5add-4cc6-b9cd-717ba1fe77cf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2970.893303] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5e3cf68c-5add-4cc6-b9cd-717ba1fe77cf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2970.893553] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c45929d8-f2d3-43c8-af44-b7bc8414f840 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2970.897491] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2970.897491] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528a24f4-9e40-9c01-b41b-4d6d381a16c5" [ 2970.897491] env[62875]: _type = "Task" [ 2970.897491] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2970.904665] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528a24f4-9e40-9c01-b41b-4d6d381a16c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2971.407908] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]528a24f4-9e40-9c01-b41b-4d6d381a16c5, 'name': SearchDatastore_Task, 'duration_secs': 0.010297} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2971.408234] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/5e3cf68c-5add-4cc6-b9cd-717ba1fe77cf is no longer used. Deleting! 
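The repeating SearchDatastore_Task / DeleteDatastoreFile_Task blocks are the image cache manager sweeping [datastore2] devstack-image-cache_base: each cached image directory is locked, a HostDatastoreBrowser search confirms it is no longer referenced, and the directory is then deleted, with every vCenter task polled to completion. A minimal sketch of the delete-and-wait step, assuming an already established oslo.vmware VMwareAPISession `session` and a `datacenter` managed-object reference (the datastore path below is just an example):

    def delete_cache_entry(session, datacenter, ds_path):
        # FileManager lives on the retrieved ServiceContent.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(
            session.vim, "DeleteDatastoreFile_Task",
            file_manager, name=ds_path, datacenter=datacenter)
        # Blocks, polling the task (the "progress is 0%" lines above)
        # until vCenter reports success, raising on task error.
        session.wait_for_task(task)

    # e.g. delete_cache_entry(session, dc_ref,
    #         "[datastore2] devstack-image-cache_base/<image-uuid>")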
[ 2971.408377] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/5e3cf68c-5add-4cc6-b9cd-717ba1fe77cf {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2971.408626] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8447cab9-718a-476c-8d57-28a69dc23dfc {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2971.414922] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2971.414922] env[62875]: value = "task-2180870" [ 2971.414922] env[62875]: _type = "Task" [ 2971.414922] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2971.422697] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2971.924833] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102639} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2971.925115] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2971.925235] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/5e3cf68c-5add-4cc6-b9cd-717ba1fe77cf" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2971.925463] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/fc826ad2-d658-4fbb-abda-fafcc9dbc24d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2971.925581] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/fc826ad2-d658-4fbb-abda-fafcc9dbc24d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2971.925914] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/fc826ad2-d658-4fbb-abda-fafcc9dbc24d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2971.926190] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b642faf8-d0d3-4064-a522-1832ebb1cbee {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2971.930188] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2971.930188] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5282ebfe-7f43-57af-2b9d-76c8329c7dff" [ 2971.930188] env[62875]: _type = "Task" [ 2971.930188] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2971.937394] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5282ebfe-7f43-57af-2b9d-76c8329c7dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2972.440883] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5282ebfe-7f43-57af-2b9d-76c8329c7dff, 'name': SearchDatastore_Task, 'duration_secs': 0.009465} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2972.441279] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/fc826ad2-d658-4fbb-abda-fafcc9dbc24d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2972.441534] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/a483fc75-f548-41a3-a1ce-bba350df596d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2972.441659] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/a483fc75-f548-41a3-a1ce-bba350df596d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2972.441978] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a483fc75-f548-41a3-a1ce-bba350df596d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2972.442260] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5f9cba1-2de2-401a-ac37-79070329c1e6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2972.446515] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2972.446515] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52659613-d7e9-8cca-ea1f-85c63597ce9e" [ 2972.446515] env[62875]: _type = "Task" [ 2972.446515] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2972.454369] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52659613-d7e9-8cca-ea1f-85c63597ce9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2972.957854] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52659613-d7e9-8cca-ea1f-85c63597ce9e, 'name': SearchDatastore_Task, 'duration_secs': 0.010124} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2972.958197] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/a483fc75-f548-41a3-a1ce-bba350df596d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2972.958392] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/51374fd7-5127-40cf-8644-dfd85790d362" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2972.958516] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/51374fd7-5127-40cf-8644-dfd85790d362" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2972.958836] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/51374fd7-5127-40cf-8644-dfd85790d362" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2972.959118] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7141ce9-ee5f-4cdc-b47d-4f4a6d5a83d7 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2972.963393] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2972.963393] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f7b93b-a94d-3bf5-9b67-eae5091857af" [ 2972.963393] env[62875]: _type = "Task" [ 2972.963393] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2972.970949] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f7b93b-a94d-3bf5-9b67-eae5091857af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2973.474704] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f7b93b-a94d-3bf5-9b67-eae5091857af, 'name': SearchDatastore_Task, 'duration_secs': 0.009376} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2973.475009] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/51374fd7-5127-40cf-8644-dfd85790d362 is no longer used. Deleting! 
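All of the Acquiring/Acquired/Releasing lines in this trace, from the "compute_resources" lock held for 3.784s around the instance claim down to the per-image cache locks, come from oslo.concurrency's lockutils, which logs every acquire and release at DEBUG. Two idioms cover what appears here; a sketch with placeholder bodies, where the lock names mirror the log and external locks assume a configured lock_path:

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def claim_resources(instance):
        # Only one claim can mutate the tracked resources at a time;
        # entering and leaving emits the acquired/released DEBUG lines.
        ...

    def sweep_cache_entry(ds_path):
        # external=True backs the lock with a file so other processes
        # on the host are excluded too (the "external semaphore" lines).
        with lockutils.lock(ds_path, external=True):
            ...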
[ 2973.476101] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/51374fd7-5127-40cf-8644-dfd85790d362 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2973.476101] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f8b3c9d-edb0-4882-ab76-7d8b32b9b301 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2973.481866] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2973.481866] env[62875]: value = "task-2180871" [ 2973.481866] env[62875]: _type = "Task" [ 2973.481866] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2973.488918] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2973.991687] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102207} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2973.992256] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2973.992256] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/51374fd7-5127-40cf-8644-dfd85790d362" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2973.992694] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/3bdd957a-10a4-4f1f-a3e5-0c33ab34da3f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2973.992694] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/3bdd957a-10a4-4f1f-a3e5-0c33ab34da3f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2973.992820] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3bdd957a-10a4-4f1f-a3e5-0c33ab34da3f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2973.993024] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ed1246e-9fec-4ac4-ad8f-5b1e2fe12b5f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2973.997771] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2973.997771] env[62875]: value = 
"session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52354e0f-3d0c-80dc-d367-471d2e88a2c1" [ 2973.997771] env[62875]: _type = "Task" [ 2973.997771] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2974.005790] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52354e0f-3d0c-80dc-d367-471d2e88a2c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2974.508257] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52354e0f-3d0c-80dc-d367-471d2e88a2c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009965} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2974.508585] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/3bdd957a-10a4-4f1f-a3e5-0c33ab34da3f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2974.508813] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/8d0173c6-16f4-4355-aad3-50350f2ec9ad" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2974.508936] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/8d0173c6-16f4-4355-aad3-50350f2ec9ad" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2974.509284] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8d0173c6-16f4-4355-aad3-50350f2ec9ad" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2974.509580] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a86214e0-58cd-4af3-bc5b-1f825227c9b5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.513844] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2974.513844] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb669e-135f-4bef-221c-eedc7e212fac" [ 2974.513844] env[62875]: _type = "Task" [ 2974.513844] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2974.521167] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb669e-135f-4bef-221c-eedc7e212fac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2975.024349] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52bb669e-135f-4bef-221c-eedc7e212fac, 'name': SearchDatastore_Task, 'duration_secs': 0.009148} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2975.024718] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image [datastore2] devstack-image-cache_base/8d0173c6-16f4-4355-aad3-50350f2ec9ad is no longer used. Deleting! [ 2975.024842] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleting the datastore file [datastore2] devstack-image-cache_base/8d0173c6-16f4-4355-aad3-50350f2ec9ad {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2975.025099] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-175e1298-8137-454c-bab1-3dcc3ad98dc1 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.031248] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2975.031248] env[62875]: value = "task-2180872" [ 2975.031248] env[62875]: _type = "Task" [ 2975.031248] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2975.038626] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180872, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2975.541883] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': task-2180872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100359} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2975.542094] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2975.542272] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/8d0173c6-16f4-4355-aad3-50350f2ec9ad" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2975.542530] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2975.542653] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2975.542990] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2975.543261] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b668f9aa-363e-43b3-8804-6c95586f3a37 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.547856] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2975.547856] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d11fba-fbf0-9605-61d3-4f33f4d3dc4e" [ 2975.547856] env[62875]: _type = "Task" [ 2975.547856] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2975.555259] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d11fba-fbf0-9605-61d3-4f33f4d3dc4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2976.058304] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52d11fba-fbf0-9605-61d3-4f33f4d3dc4e, 'name': SearchDatastore_Task, 'duration_secs': 0.009164} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2976.058709] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/e4e7ef29-a2b8-4d7f-83de-20587ed24a8b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2976.058838] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2976.058956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2976.059303] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2976.059606] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a560609e-230c-4b65-a8fb-b12ee6726c5d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.063855] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2976.063855] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522942fa-9c2a-6ed3-c5fa-6b310ff52944" [ 2976.063855] env[62875]: _type = "Task" [ 2976.063855] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2976.071503] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522942fa-9c2a-6ed3-c5fa-6b310ff52944, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2976.574791] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]522942fa-9c2a-6ed3-c5fa-6b310ff52944, 'name': SearchDatastore_Task, 'duration_secs': 0.009144} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2976.575058] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Creating directory with path [datastore2] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee/ts-2025-01-25-05-30-41 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2976.575316] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26a2e269-32bf-49f8-b2e7-6c500d8028af {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.588056] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Created directory with path [datastore2] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee/ts-2025-01-25-05-30-41 {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2976.588220] env[62875]: INFO nova.virt.vmwareapi.imagecache [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Image f9addc8a-f291-4e7a-a1c5-93144e13b3ee is no longer used by this node. Pending deletion! [ 2976.588369] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/f9addc8a-f291-4e7a-a1c5-93144e13b3ee" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2976.588590] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/8a22207b-d676-40af-a987-a5cc5ef30fe1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2976.588706] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/8a22207b-d676-40af-a987-a5cc5ef30fe1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2976.589032] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8a22207b-d676-40af-a987-a5cc5ef30fe1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2976.589270] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ce4e775-69a1-4b39-acd5-5ada95b9c226 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.593574] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2976.593574] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb37aa-e5df-1c28-b9ed-c2a84d6349d5" [ 2976.593574] env[62875]: _type = "Task" [ 2976.593574] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2976.600660] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb37aa-e5df-1c28-b9ed-c2a84d6349d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.105079] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52fb37aa-e5df-1c28-b9ed-c2a84d6349d5, 'name': SearchDatastore_Task, 'duration_secs': 0.008508} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2977.105484] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/8a22207b-d676-40af-a987-a5cc5ef30fe1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2977.105612] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/d4fba848-ca6c-4c6c-8ce0-4fc7a02a773b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2977.105734] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/d4fba848-ca6c-4c6c-8ce0-4fc7a02a773b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2977.106140] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d4fba848-ca6c-4c6c-8ce0-4fc7a02a773b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2977.106424] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72324520-c7ed-439b-82ff-2f9d09065c3b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.110852] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2977.110852] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c1052-1655-6ecb-5149-5d556794a56a" [ 2977.110852] env[62875]: _type = "Task" [ 2977.110852] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2977.119048] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c1052-1655-6ecb-5149-5d556794a56a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.621936] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]526c1052-1655-6ecb-5149-5d556794a56a, 'name': SearchDatastore_Task, 'duration_secs': 0.008952} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2977.622247] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/d4fba848-ca6c-4c6c-8ce0-4fc7a02a773b" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2977.622480] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2977.622602] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2977.622922] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2977.623198] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294f8b34-e35d-4677-95a0-b11803c0a46a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.627266] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2977.627266] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b37662-8a28-751c-7cef-8b10db53e3f7" [ 2977.627266] env[62875]: _type = "Task" [ 2977.627266] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2977.634458] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b37662-8a28-751c-7cef-8b10db53e3f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2978.082399] env[62875]: DEBUG nova.compute.manager [req-2ae40c88-6584-491a-99c8-68acd3117a1c req-f062a653-268b-4a29-95f6-6f2036045ea2 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-vif-plugged-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2978.082617] env[62875]: DEBUG oslo_concurrency.lockutils [req-2ae40c88-6584-491a-99c8-68acd3117a1c req-f062a653-268b-4a29-95f6-6f2036045ea2 service nova] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2978.082826] env[62875]: DEBUG oslo_concurrency.lockutils [req-2ae40c88-6584-491a-99c8-68acd3117a1c req-f062a653-268b-4a29-95f6-6f2036045ea2 service nova] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2978.082992] env[62875]: DEBUG oslo_concurrency.lockutils [req-2ae40c88-6584-491a-99c8-68acd3117a1c req-f062a653-268b-4a29-95f6-6f2036045ea2 service nova] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2978.083172] env[62875]: DEBUG nova.compute.manager [req-2ae40c88-6584-491a-99c8-68acd3117a1c req-f062a653-268b-4a29-95f6-6f2036045ea2 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] No waiting events found dispatching network-vif-plugged-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2978.083337] env[62875]: WARNING nova.compute.manager [req-2ae40c88-6584-491a-99c8-68acd3117a1c req-f062a653-268b-4a29-95f6-6f2036045ea2 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received unexpected event network-vif-plugged-f4db9102-d2d3-403c-bfd9-37d1942d463b for instance with vm_state shelved_offloaded and task_state spawning. [ 2978.137352] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b37662-8a28-751c-7cef-8b10db53e3f7, 'name': SearchDatastore_Task, 'duration_secs': 0.010068} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2978.137674] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/db238a3e-faea-47d3-864c-ef3712b5582e" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2978.137913] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/8e4845cb-c943-4839-846e-eaaa0e05bce1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2978.138047] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/8e4845cb-c943-4839-846e-eaaa0e05bce1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2978.138375] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8e4845cb-c943-4839-846e-eaaa0e05bce1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2978.138636] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32cb3a0a-d17a-49a3-9aae-06c3d7b0f79d {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2978.143480] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2978.143480] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f50df1-808a-66ac-1689-70a619f0eeab" [ 2978.143480] env[62875]: _type = "Task" [ 2978.143480] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2978.150899] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f50df1-808a-66ac-1689-70a619f0eeab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2978.168515] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2978.168686] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2978.168882] env[62875]: DEBUG nova.network.neutron [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Building network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2978.653874] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52f50df1-808a-66ac-1689-70a619f0eeab, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2978.654195] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/8e4845cb-c943-4839-846e-eaaa0e05bce1" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2978.654427] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2978.654552] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2978.654878] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2978.655156] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3c248c6-f2b6-498f-aa13-1516bf32272f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2978.659297] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2978.659297] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52977058-be37-9876-7af4-1b0fd768362f" [ 2978.659297] env[62875]: _type = "Task" [ 2978.659297] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2978.666542] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52977058-be37-9876-7af4-1b0fd768362f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2978.868540] env[62875]: DEBUG nova.network.neutron [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2979.170372] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52977058-be37-9876-7af4-1b0fd768362f, 'name': SearchDatastore_Task, 'duration_secs': 0.0095} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2979.170758] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/5648e58e-fbc8-490b-b5ad-d9da446089f2" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2979.170926] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/ae0b72de-cfd3-4385-aa80-2a24cdf92ac0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2979.171055] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/ae0b72de-cfd3-4385-aa80-2a24cdf92ac0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2979.171410] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ae0b72de-cfd3-4385-aa80-2a24cdf92ac0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2979.171674] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64c45a86-1f7e-4115-922a-5531132d652f {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.176324] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2979.176324] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52752099-fb57-b657-563b-5577a00740a2" [ 2979.176324] env[62875]: _type = "Task" [ 2979.176324] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2979.183888] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52752099-fb57-b657-563b-5577a00740a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2979.371325] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2979.396793] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-25T04:48:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='efa0d8ef2949226fe3e2db22212723ca',container_format='bare',created_at=2025-01-25T05:30:00Z,direct_url=,disk_format='vmdk',id=74a2b5f5-272a-42b9-93bb-7e3de925645f,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-885324740-shelved',owner='ccc96aead000465a9613e6bb73d31721',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-01-25T05:30:13Z,virtual_size=,visibility=), allow threads: False {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2979.397056] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2979.397221] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image limits 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2979.397405] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Flavor pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2979.397554] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Image pref 0:0:0 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2979.397733] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62875) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2979.397941] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2979.398128] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2979.398319] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Got 1 possible topologies {{(pid=62875) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2979.398485] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2979.398658] env[62875]: DEBUG nova.virt.hardware [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62875) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2979.399514] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3854305b-45ee-4989-999d-6062516f0de3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.407131] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeea45eb-4718-4f0f-a12e-9429d5fdf3a2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.421123] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:d9:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4db9102-d2d3-403c-bfd9-37d1942d463b', 'vif_model': 'vmxnet3'}] {{(pid=62875) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2979.428350] env[62875]: DEBUG oslo.service.loopingcall [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2979.428564] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Creating VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2979.428754] env[62875]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0f42d40-e530-49dc-bf89-3041fa51d695 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.447595] env[62875]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2979.447595] env[62875]: value = "task-2180873" [ 2979.447595] env[62875]: _type = "Task" [ 2979.447595] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2979.454743] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180873, 'name': CreateVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2979.687509] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52752099-fb57-b657-563b-5577a00740a2, 'name': SearchDatastore_Task, 'duration_secs': 0.009744} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2979.687944] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/ae0b72de-cfd3-4385-aa80-2a24cdf92ac0" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2979.688211] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2979.688347] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2979.688677] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2979.688939] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63f07230-9310-4270-b436-6453af6867c2 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.693277] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2979.693277] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5258e160-15d8-b325-9065-195695373242" [ 2979.693277] env[62875]: _type = "Task" [ 2979.693277] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2979.700694] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5258e160-15d8-b325-9065-195695373242, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2979.957631] env[62875]: DEBUG oslo_vmware.api [-] Task: {'id': task-2180873, 'name': CreateVM_Task, 'duration_secs': 0.391756} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2979.957787] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Created VM on the ESX host {{(pid=62875) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2979.958473] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2979.958640] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2979.959009] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2979.959266] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-922c98d1-b632-40a0-9a59-dd549a333fe5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.963851] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2979.963851] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5234ea67-0b39-67f5-165f-c2b2296805ab" [ 2979.963851] env[62875]: _type = "Task" [ 2979.963851] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2979.971427] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5234ea67-0b39-67f5-165f-c2b2296805ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2980.111551] env[62875]: DEBUG nova.compute.manager [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 2980.111750] env[62875]: DEBUG nova.compute.manager [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing instance network info cache due to event network-changed-f4db9102-d2d3-403c-bfd9-37d1942d463b. {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 2980.111963] env[62875]: DEBUG oslo_concurrency.lockutils [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2980.112121] env[62875]: DEBUG oslo_concurrency.lockutils [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2980.112286] env[62875]: DEBUG nova.network.neutron [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Refreshing network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2980.203230] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]5258e160-15d8-b325-9065-195695373242, 'name': SearchDatastore_Task, 'duration_secs': 0.009063} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2980.203583] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/73a4a7cf-2b94-4127-beee-11e5a126b74d" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2980.203805] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2980.203896] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2980.204242] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2980.204504] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5244c296-2d50-433b-baf0-47b25a678e50 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2980.208751] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2980.208751] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eaee9a-a518-e883-bb6b-abda6943acbc" [ 2980.208751] env[62875]: _type = "Task" [ 2980.208751] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2980.215926] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eaee9a-a518-e883-bb6b-abda6943acbc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2980.474293] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2980.474563] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Processing image 74a2b5f5-272a-42b9-93bb-7e3de925645f {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2980.474842] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f/74a2b5f5-272a-42b9-93bb-7e3de925645f.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2980.475052] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f/74a2b5f5-272a-42b9-93bb-7e3de925645f.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2980.475284] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2980.475537] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9449bc1b-f4e4-4125-a9a4-ace1699e489b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2980.483253] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2980.483411] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62875) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2980.484082] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f402d7ab-4fcb-4e99-a6c2-4dd203445699 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2980.488652] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2980.488652] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52838295-f3b4-7495-4a00-b2a7db8526fe" [ 2980.488652] env[62875]: _type = "Task" [ 2980.488652] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2980.495759] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52838295-f3b4-7495-4a00-b2a7db8526fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2980.728767] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52eaee9a-a518-e883-bb6b-abda6943acbc, 'name': SearchDatastore_Task, 'duration_secs': 0.035254} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2980.729014] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2980.729243] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2980.729367] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2980.729631] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2980.729916] env[62875]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bc6478e-f3ce-4c30-be05-15a8817de869 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2980.734616] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Waiting for the task: (returnval){ [ 2980.734616] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ea57e3-05bf-8c6f-12ac-67b0cfd60107" [ 2980.734616] env[62875]: _type = 
"Task" [ 2980.734616] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2980.745108] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ea57e3-05bf-8c6f-12ac-67b0cfd60107, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2980.846337] env[62875]: DEBUG nova.network.neutron [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updated VIF entry in instance network info cache for port f4db9102-d2d3-403c-bfd9-37d1942d463b. {{(pid=62875) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2980.846685] env[62875]: DEBUG nova.network.neutron [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2980.998483] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Preparing fetch location {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2980.998669] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Fetch image to [datastore2] OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4/OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4.vmdk {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2980.998852] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] 
[instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Downloading stream optimized image 74a2b5f5-272a-42b9-93bb-7e3de925645f to [datastore2] OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4/OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4.vmdk on the data store datastore2 as vApp {{(pid=62875) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2980.999034] env[62875]: DEBUG nova.virt.vmwareapi.images [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Downloading image file data 74a2b5f5-272a-42b9-93bb-7e3de925645f to the ESX as VM named 'OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4' {{(pid=62875) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2981.063820] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2981.063820] env[62875]: value = "resgroup-9" [ 2981.063820] env[62875]: _type = "ResourcePool" [ 2981.063820] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2981.064120] env[62875]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-dea7a0fa-a62f-402d-8635-e09de857e23c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2981.084209] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease: (returnval){ [ 2981.084209] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48eac-f0a0-8b27-4534-b190779ef7a2" [ 2981.084209] env[62875]: _type = "HttpNfcLease" [ 2981.084209] env[62875]: } obtained for vApp import into resource pool (val){ [ 2981.084209] env[62875]: value = "resgroup-9" [ 2981.084209] env[62875]: _type = "ResourcePool" [ 2981.084209] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2981.084513] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the lease: (returnval){ [ 2981.084513] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48eac-f0a0-8b27-4534-b190779ef7a2" [ 2981.084513] env[62875]: _type = "HttpNfcLease" [ 2981.084513] env[62875]: } to be ready. {{(pid=62875) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2981.090250] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2981.090250] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48eac-f0a0-8b27-4534-b190779ef7a2" [ 2981.090250] env[62875]: _type = "HttpNfcLease" [ 2981.090250] env[62875]: } is initializing. 
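Annotation: the records above show ResourcePool.ImportVApp handing back an HttpNfcLease that starts out "initializing" and must be polled until it turns "ready" before any image bytes can be pushed (here it takes roughly half a second, 2981.090 to 2981.592). A minimal sketch of that polling loop, assuming a hypothetical get_lease_state() helper in place of the real property-collector read:

    import time

    LEASE_READY, LEASE_ERROR = "ready", "error"

    def wait_for_lease_ready(lease, get_lease_state, poll_interval=0.5, timeout=60):
        """Poll an HttpNfcLease until it leaves 'initializing' (illustrative)."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = get_lease_state(lease)  # hypothetical HttpNfcLease.state read
            if state == LEASE_READY:
                return
            if state == LEASE_ERROR:
                raise RuntimeError("HttpNfcLease entered the error state")
            time.sleep(poll_interval)  # the log polls at ~0.5s intervals
        raise TimeoutError("HttpNfcLease never became ready")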
{{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2981.245791] env[62875]: DEBUG oslo_vmware.api [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Task: {'id': session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52ea57e3-05bf-8c6f-12ac-67b0cfd60107, 'name': SearchDatastore_Task, 'duration_secs': 0.017152} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2981.246182] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "[datastore2] devstack-image-cache_base/a9637bcc-4de8-4ea1-be59-4c697becf2a7" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2981.246321] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2981.246466] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 2981.349393] env[62875]: DEBUG oslo_concurrency.lockutils [req-4ab0e0de-cf51-4a1d-b953-2c8fd513a857 req-bf31d912-68c9-41e7-a3e6-ebb828ce46d3 service nova] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2981.592674] env[62875]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2981.592674] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48eac-f0a0-8b27-4534-b190779ef7a2" [ 2981.592674] env[62875]: _type = "HttpNfcLease" [ 2981.592674] env[62875]: } is ready. {{(pid=62875) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2981.593047] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2981.593047] env[62875]: value = "session[52f0c584-4687-8c25-2ef6-c44f88bd9de0]52b48eac-f0a0-8b27-4534-b190779ef7a2" [ 2981.593047] env[62875]: _type = "HttpNfcLease" [ 2981.593047] env[62875]: }. {{(pid=62875) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2981.593664] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46dbdc1-1aa6-406d-8503-ef91bc9e21c0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2981.600683] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249ffab-9fd2-83f4-3e29-9e47c8a1cf22/disk-0.vmdk from lease info. 
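Annotation: once the lease is ready, the handler reads the lease info and extracts the disk upload URL (the "Found VMDK URL" record above). In the vSphere API that data lives in HttpNfcLeaseInfo.deviceUrl, whose entries carry a url string and a disk flag; oslo.vmware also substitutes the ESX host for a "*" placeholder in the URL when one appears. A sketch over objects shaped like that structure (the argument here is a mock, not a real binding):

    def find_vmdk_url(lease_info, host):
        """Pick the first disk device URL out of an HttpNfcLeaseInfo-like object."""
        for device_url in lease_info.deviceUrl:
            if device_url.disk:
                # some endpoints return "https://*/nfc/..."; fill in the host
                return device_url.url.replace("*", host)
        raise LookupError("lease exposes no disk deviceUrl")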
{{(pid=62875) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2981.600808] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249ffab-9fd2-83f4-3e29-9e47c8a1cf22/disk-0.vmdk. {{(pid=62875) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2981.664434] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0ce01618-d8b8-47a3-8260-8c8f54b967ed {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2981.751056] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] There are 7 instances to clean {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 2981.751300] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 2e1129fe-f32c-4868-bc68-a39ea14fe9d2] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2982.254942] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 61cc11b0-56bf-48ce-82e6-64c5d91d177c] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2982.731606] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Completed reading data from the image iterator. {{(pid=62875) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2982.732014] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249ffab-9fd2-83f4-3e29-9e47c8a1cf22/disk-0.vmdk. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2982.733093] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075ddd2c-6193-40d6-a658-ac74376c3289 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.740737] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249ffab-9fd2-83f4-3e29-9e47c8a1cf22/disk-0.vmdk is in state: ready. {{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2982.740979] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249ffab-9fd2-83f4-3e29-9e47c8a1cf22/disk-0.vmdk. 
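Annotation: with the URL in hand, the 31668736-byte stream-optimized VMDK is written over HTTP while periodic HttpNfcLeaseProgress calls keep the lease alive (the HttpNfcLeaseProgress invocation at 2981.664 above). A rough upload sketch using requests; report_progress stands in for the real SOAP progress call, and the stream-VMDK content type is quoted from memory, so treat both as assumptions:

    import requests

    def upload_vmdk(url, image_iter, total_size, report_progress):
        """Stream image chunks to the NFC URL, reporting percent done as we go."""
        sent = 0

        def counting_iter():
            nonlocal sent
            for chunk in image_iter:
                sent += len(chunk)
                report_progress(min(100, sent * 100 // max(total_size, 1)))
                yield chunk

        # Lab ESX endpoints typically use self-signed certs, hence verify=False;
        # a production client should verify against the deployment CA instead.
        resp = requests.put(
            url, data=counting_iter(), verify=False,
            headers={"Content-Type": "application/x-vnd.vmware-streamVmdk"})
        resp.raise_for_status()
        return sent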
{{(pid=62875) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2982.741270] env[62875]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f7a0e7b1-5e94-4858-83d6-798e5612dfb5 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.760757] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 894b9113-47ae-4b50-ae42-682be81324ba] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2982.910914] env[62875]: DEBUG oslo_vmware.rw_handles [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249ffab-9fd2-83f4-3e29-9e47c8a1cf22/disk-0.vmdk. {{(pid=62875) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2982.911125] env[62875]: INFO nova.virt.vmwareapi.images [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Downloaded image file data 74a2b5f5-272a-42b9-93bb-7e3de925645f [ 2982.911929] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863e79ca-7444-4398-a05c-1389eedcbb6b {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.927472] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-903ff3f3-3f4d-41e3-9180-4828af25362e {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.963460] env[62875]: INFO nova.virt.vmwareapi.images [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] The imported VM was unregistered [ 2982.965655] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Caching image {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2982.965889] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Creating directory with path [datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2982.966176] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b37dcc24-ced1-4245-b4c9-fb3f991f5940 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.976573] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Created directory with path [datastore2] 
devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f {{(pid=62875) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2982.976792] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4/OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4.vmdk to [datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f/74a2b5f5-272a-42b9-93bb-7e3de925645f.vmdk. {{(pid=62875) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2982.977062] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bea699dc-b72e-46d4-b13d-e9415c346311 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.983591] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2982.983591] env[62875]: value = "task-2180876" [ 2982.983591] env[62875]: _type = "Task" [ 2982.983591] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2982.992555] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2983.264135] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 467b53e0-1614-4624-841d-1310271825bc] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2983.494227] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2983.767858] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 0ea0366f-3383-4da0-abf4-c8cbfa199809] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2983.994866] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task} progress is 49%. 
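Annotation: every long-running VI call in this log (SearchDatastore_Task above, MoveVirtualDisk_Task here, and CopyVirtualDisk/Reconfig/Rename/PowerOn further down) follows the same wait_for_task pattern: submit the task, then poll its TaskInfo until it reaches "success" or "error", surfacing the progress percentages seen in these records. A generic sketch, with read_task_info as a hypothetical TaskInfo fetch:

    import time

    def wait_for_task(task_ref, read_task_info, poll_interval=0.5):
        """Block until a vCenter task finishes; return its info (illustrative)."""
        start = time.monotonic()
        while True:
            info = read_task_info(task_ref)  # hypothetical property-collector read
            if info.state == "success":
                info.duration_secs = time.monotonic() - start  # like the log field
                return info
            if info.state == "error":
                raise RuntimeError(info.error)
            # progress climbs 0% -> 24% -> 49% -> ... at roughly 0.5s steps above
            time.sleep(poll_interval)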
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2984.272013] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a9cc9da5-b40c-492d-92a5-85e760290be9] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2984.495341] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2984.775925] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: a5f9b278-6c02-4d5e-997a-97a8fa8944ca] Instance has had 0 of 5 cleanup attempts {{(pid=62875) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 2984.995353] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2985.279372] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2985.279751] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Cleaning up deleted instances with incomplete migration {{(pid=62875) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 2985.495841] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2985.782306] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2985.995383] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2986.496360] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180876, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.156855} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2986.496753] env[62875]: INFO nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4/OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4.vmdk to [datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f/74a2b5f5-272a-42b9-93bb-7e3de925645f.vmdk. [ 2986.496753] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Cleaning up location [datastore2] OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4 {{(pid=62875) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2986.496923] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_b23b7005-e27e-43ee-bf32-6fc498d5dbd4 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2986.497145] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb61b777-dfd1-4395-b365-7c6f6c256052 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2986.503730] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2986.503730] env[62875]: value = "task-2180877" [ 2986.503730] env[62875]: _type = "Task" [ 2986.503730] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2986.510699] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180877, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2987.013814] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180877, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131366} completed successfully. 
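Annotation: taken together, the records from 2980.998 to 2987.014 trace this driver's fetch-if-missing image cache flow: download once into a throwaway OSTACK_IMG_* directory, move the disk into devstack-image-cache_base/<image-uuid>/, then delete the temporary directory. Condensed into a sketch whose path layout comes straight from the log but whose helper callables are placeholders, not Nova's real functions:

    def fetch_image_if_missing(ds, image_id, tmp_name, *,
                               exists, import_vapp, move_disk, delete_dir):
        """Ensure [ds] devstack-image-cache_base/<image_id>/<image_id>.vmdk exists."""
        cached = f"[{ds}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        if exists(cached):
            return cached
        tmp = f"[{ds}] {tmp_name}/{tmp_name}.vmdk"
        import_vapp(tmp)                   # stream-optimized upload + UnregisterVM
        move_disk(tmp, cached)             # MoveVirtualDisk_Task (3.16s in the log)
        delete_dir(f"[{ds}] {tmp_name}")   # DeleteDatastoreFile_Task cleanup
        return cached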
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2987.014093] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2987.014265] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f/74a2b5f5-272a-42b9-93bb-7e3de925645f.vmdk" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2987.014511] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f/74a2b5f5-272a-42b9-93bb-7e3de925645f.vmdk to [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2987.014765] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4193594d-61ef-4760-98a5-6b3a1ef9c41c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2987.021469] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2987.021469] env[62875]: value = "task-2180878" [ 2987.021469] env[62875]: _type = "Task" [ 2987.021469] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2987.029925] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2987.531139] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180878, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2988.032103] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180878, 'name': CopyVirtualDisk_Task} progress is 49%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2988.532643] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180878, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2989.033295] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180878, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2989.534838] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180878, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.079794} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2989.535123] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/74a2b5f5-272a-42b9-93bb-7e3de925645f/74a2b5f5-272a-42b9-93bb-7e3de925645f.vmdk to [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk {{(pid=62875) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2989.535873] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ffc845-a94d-451c-ba97-1e96f36270f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2989.557306] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk or device None with type streamOptimized {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2989.557544] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-961dcd7e-6918-4691-b5a0-1261e205e7f0 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2989.577342] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2989.577342] env[62875]: value = "task-2180879" [ 2989.577342] env[62875]: _type = "Task" [ 2989.577342] env[62875]: } to complete. 
{{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2989.584432] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2990.087671] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180879, 'name': ReconfigVM_Task, 'duration_secs': 0.273467} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2990.087937] env[62875]: DEBUG nova.virt.vmwareapi.volumeops [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8/00e65eb9-1db1-4456-9603-8b4cbff8ffe8.vmdk or device None with type streamOptimized {{(pid=62875) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2990.089419] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-432163f6-4e4a-4f05-83f0-d979553c75fe {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2990.095074] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2990.095074] env[62875]: value = "task-2180880" [ 2990.095074] env[62875]: _type = "Task" [ 2990.095074] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2990.102339] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180880, 'name': Rename_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2990.604990] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180880, 'name': Rename_Task, 'duration_secs': 0.140856} completed successfully. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2990.605365] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powering on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2990.605527] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99eeb0ee-b831-4a63-a2b9-6b546f2b7371 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2990.612107] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 2990.612107] env[62875]: value = "task-2180881" [ 2990.612107] env[62875]: _type = "Task" [ 2990.612107] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2990.619845] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180881, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2991.122249] env[62875]: DEBUG oslo_vmware.api [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180881, 'name': PowerOnVM_Task, 'duration_secs': 0.431652} completed successfully. 
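Annotation: the tail of the unshelve is a fixed four-task sequence against the instance VM: CopyVirtualDisk (cache to instance folder), ReconfigVM to attach the copied disk with type streamOptimized, Rename, then PowerOn. A condensed replay reusing the hypothetical run_task (invoke plus wait) idea from the earlier sketch, with the durations these records report:

    def finish_unshelve(vm, disk_src, disk_dst, run_task):
        """Replay the task sequence visible between timestamps 2987.0 and 2991.1."""
        run_task("CopyVirtualDisk_Task", src=disk_src, dst=disk_dst)  # ~2.08s
        run_task("ReconfigVM_Task", vm=vm, disk=disk_dst,
                 disk_type="streamOptimized")                         # ~0.27s
        run_task("Rename_Task", vm=vm)                                # ~0.14s
        run_task("PowerOnVM_Task", vm=vm)                             # ~0.43s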
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2991.122517] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powered on the VM {{(pid=62875) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2991.214369] env[62875]: DEBUG nova.compute.manager [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Checking state {{(pid=62875) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2991.215309] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eec6ad2-f974-4b9e-9d74-f37f226f5fb3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2991.732064] env[62875]: DEBUG oslo_concurrency.lockutils [None req-d61908db-3bd9-4756-acb3-8e47d334364c tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.377s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3017.284883] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3017.285316] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3017.790827] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3017.790973] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Starting heal instance info cache {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 3017.791108] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Rebuilding the list of instances to heal {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 3018.323893] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3018.324110] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquired lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3018.324240] env[62875]: DEBUG 
nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Forcefully refreshing network info cache for instance {{(pid=62875) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 3018.324396] env[62875]: DEBUG nova.objects.instance [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lazy-loading 'info_cache' on Instance uuid 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 3020.044655] env[62875]: DEBUG nova.network.neutron [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [{"id": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "address": "fa:16:3e:84:d9:45", "network": {"id": "014d9584-3849-4555-9c98-d2365997f7c3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-700794013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc96aead000465a9613e6bb73d31721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4db9102-d2", "ovs_interfaceid": "f4db9102-d2d3-403c-bfd9-37d1942d463b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3020.548017] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Releasing lock "refresh_cache-00e65eb9-1db1-4456-9603-8b4cbff8ffe8" {{(pid=62875) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3020.548276] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updated the network info_cache for instance {{(pid=62875) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 3020.548510] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3020.548719] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3020.548882] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3020.549058] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3020.549218] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3020.549368] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3020.549498] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 3020.549666] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3021.052868] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3021.053221] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3021.053335] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3021.053499] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62875) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 3021.054784] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af7e667-b3f1-4aad-9621-724fcbb3d268 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.063058] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e50e5e-3e30-43f1-969c-9d85a984a092 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.077020] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f330c24e-bbc7-415c-9245-18a5de83156a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.083505] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe6215d-9765-418a-8c8f-5d609a237ace {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.114384] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180781MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62875) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 3021.114539] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3021.114706] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3022.185348] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Instance 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
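Annotation: the "Final resource view" record that follows is plain arithmetic over the hypervisor stats and the single live instance: used_ram = 512 MB reserved + 192 MB for the instance = 704 MB, used_disk = 1 GB, used_vcpus = 1 of 48. Recomputed from the numbers in these records:

    # Values copied from the resource-tracker records around 3021-3022.
    phys_ram_mb, reserved_ram_mb = 196590, 512
    inst = {"MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1}  # placement allocation

    used_ram_mb = reserved_ram_mb + inst["MEMORY_MB"]   # 704, matches used_ram
    used_disk_gb = inst["DISK_GB"]                      # 1, matches used_disk
    used_vcpus = inst["VCPU"]                           # 1, matches used_vcpus
    assert (used_ram_mb, used_disk_gb, used_vcpus) == (704, 1, 1)

    # Note: the hypervisor's own free_ram (180781 MB) is measured from vSphere,
    # not derived from this subtraction (196590 - 704 = 195886 MB).
    free_for_placement_mb = phys_ram_mb - used_ram_mb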
{{(pid=62875) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3022.185604] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 3022.185702] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62875) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 3022.212361] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c11c06-cc17-427f-8cbc-90be37dfae9a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.219965] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4896377f-3eb5-4170-abad-f7e2e941ba50 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.248984] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0c85a6-340b-4209-8660-e2789387d853 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.256061] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a793fb8-6e42-4dcf-9fe6-54fd93a84b52 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.268776] env[62875]: DEBUG nova.compute.provider_tree [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3022.772248] env[62875]: DEBUG nova.scheduler.client.report [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 3023.278338] env[62875]: DEBUG nova.compute.resource_tracker [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62875) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 3023.278689] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.164s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3028.275973] env[62875]: DEBUG oslo_concurrency.lockutils [None 
req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3028.276372] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3028.276492] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3028.276686] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3028.276862] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3028.280124] env[62875]: INFO nova.compute.manager [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Terminating instance [ 3028.784258] env[62875]: DEBUG nova.compute.manager [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Start destroying the instance on the hypervisor. 
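Annotation: terminate_instance serializes on two scoped locks before touching the VM: the instance-UUID lock held for the whole do_terminate_instance body, plus a short-lived "<uuid>-events" lock used to flush pending external events (all four lock records above complete in about a millisecond). The same pattern is available to any oslo-based service; a minimal sketch with oslo.concurrency:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "00e65eb9-1db1-4456-9603-8b4cbff8ffe8"

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # held across the whole teardown; the log shows a 0.001s wait for it
        with lockutils.lock(f"{INSTANCE_UUID}-events"):
            pass  # clear pending events under the narrower "-events" lock
        shutdown_and_destroy()

    def shutdown_and_destroy():
        print("power off, unregister, delete datastore files")  # placeholder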
{{(pid=62875) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 3028.784525] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Destroying instance {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 3028.785398] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb916438-5f2a-472b-96a9-86264f8546de {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.793199] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powering off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 3028.793423] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b099f3dc-38bd-42f8-9c39-7fb2ef54c8a3 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.799997] env[62875]: DEBUG oslo_vmware.api [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 3028.799997] env[62875]: value = "task-2180882" [ 3028.799997] env[62875]: _type = "Task" [ 3028.799997] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3028.807813] env[62875]: DEBUG oslo_vmware.api [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180882, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3029.310177] env[62875]: DEBUG oslo_vmware.api [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180882, 'name': PowerOffVM_Task, 'duration_secs': 0.169366} completed successfully. 
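Annotation: the hypervisor-side destroy that follows runs in a fixed order, PowerOffVM_Task, then a plain (non-task) UnregisterVM call, then DeleteDatastoreFile_Task on the instance folder, exactly as the records from 3028.79 to 3034.40 report. Sketched with the same hypothetical run_task helper as earlier:

    def destroy_instance(vm_ref, ds_folder, run_task, unregister):
        """Mirror the teardown order in the log: off -> unregister -> delete."""
        run_task("PowerOffVM_Task", vm=vm_ref)                # 0.17s in the log
        unregister(vm_ref)                                    # UnregisterVM, no task
        run_task("DeleteDatastoreFile_Task", path=ds_folder)  # 0.14s, whole folder
        # after this the instance is "destroyed" and network teardown begins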
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3029.310572] env[62875]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Powered off the VM {{(pid=62875) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 3029.310674] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Unregistering the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 3029.310882] env[62875]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4515d869-71fb-4cf1-85a9-91bab844db47 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.885128] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Unregistered the VM {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 3033.885531] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Deleting contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 3033.885531] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleting the datastore file [datastore2] 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3033.885807] env[62875]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea7a573c-cb79-4bed-8064-af9d949d349c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.892158] env[62875]: DEBUG oslo_vmware.api [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for the task: (returnval){ [ 3033.892158] env[62875]: value = "task-2180884" [ 3033.892158] env[62875]: _type = "Task" [ 3033.892158] env[62875]: } to complete. {{(pid=62875) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3033.899415] env[62875]: DEBUG oslo_vmware.api [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3034.402461] env[62875]: DEBUG oslo_vmware.api [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Task: {'id': task-2180884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139951} completed successfully. {{(pid=62875) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3034.402711] env[62875]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted the datastore file {{(pid=62875) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3034.402876] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Deleted contents of the VM from datastore datastore2 {{(pid=62875) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 3034.403064] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Instance destroyed {{(pid=62875) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 3034.403248] env[62875]: INFO nova.compute.manager [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Took 5.62 seconds to destroy the instance on the hypervisor. [ 3034.403505] env[62875]: DEBUG oslo.service.loopingcall [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62875) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3034.403688] env[62875]: DEBUG nova.compute.manager [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Deallocating network for instance {{(pid=62875) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 3034.403850] env[62875]: DEBUG nova.network.neutron [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] deallocate_for_instance() {{(pid=62875) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 3035.073087] env[62875]: DEBUG nova.compute.manager [req-21c69d88-08e1-4b8f-8e29-9369d7e8079e req-740da2a5-248e-413c-b0f6-549fd6bad9a1 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Received event network-vif-deleted-f4db9102-d2d3-403c-bfd9-37d1942d463b {{(pid=62875) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 3035.073421] env[62875]: INFO nova.compute.manager [req-21c69d88-08e1-4b8f-8e29-9369d7e8079e req-740da2a5-248e-413c-b0f6-549fd6bad9a1 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Neutron deleted interface f4db9102-d2d3-403c-bfd9-37d1942d463b; detaching it from the instance and deleting it from the info cache [ 3035.073628] env[62875]: DEBUG nova.network.neutron [req-21c69d88-08e1-4b8f-8e29-9369d7e8079e req-740da2a5-248e-413c-b0f6-549fd6bad9a1 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3035.555044] env[62875]: DEBUG nova.network.neutron [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Updating instance_info_cache with network_info: [] {{(pid=62875) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3035.576725] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-900b34ea-5c4d-4377-8499-1fba21fb0df6 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.586323] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7b0e0e-1028-4cf8-9e22-0cf2fc2a14f8 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.611169] env[62875]: DEBUG nova.compute.manager [req-21c69d88-08e1-4b8f-8e29-9369d7e8079e req-740da2a5-248e-413c-b0f6-549fd6bad9a1 service nova] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Detach interface failed, port_id=f4db9102-d2d3-403c-bfd9-37d1942d463b, reason: Instance 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 could not be found. {{(pid=62875) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 3036.056895] env[62875]: INFO nova.compute.manager [-] [instance: 00e65eb9-1db1-4456-9603-8b4cbff8ffe8] Took 1.65 seconds to deallocate network for instance. 
[ 3036.564273] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 3036.564633] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3036.564785] env[62875]: DEBUG nova.objects.instance [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lazy-loading 'resources' on Instance uuid 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 3037.097697] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee552574-fc64-445b-94aa-8089e41fe7bb {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3037.104846] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a96a753-3359-434a-a027-bea01e2a7201 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3037.134415] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e97bd7-153a-4abd-b9c7-6498f2b2b857 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3037.141627] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c75a1b-97d7-45cf-ace5-8a903d85bc16 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3037.154239] env[62875]: DEBUG nova.compute.provider_tree [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed in ProviderTree for provider: 2d6e5fad-ed55-4f17-b68d-be9dae183a02 {{(pid=62875) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 3037.657788] env[62875]: DEBUG nova.scheduler.client.report [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Inventory has not changed for provider 2d6e5fad-ed55-4f17-b68d-be9dae183a02 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62875) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 3038.034051] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3038.162956] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3038.182449] env[62875]: INFO nova.scheduler.client.report [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Deleted allocations for instance 00e65eb9-1db1-4456-9603-8b4cbff8ffe8
[ 3038.537332] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Getting list of instances from cluster (obj){
[ 3038.537332] env[62875]: value = "domain-c8"
[ 3038.537332] env[62875]: _type = "ClusterComputeResource"
[ 3038.537332] env[62875]: } {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}}
[ 3038.538406] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8cf594-180e-4557-ad90-d8b77c812f7c {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3038.547555] env[62875]: DEBUG nova.virt.vmwareapi.vmops [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Got total of 0 instances {{(pid=62875) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}}
[ 3038.547715] env[62875]: WARNING nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] While synchronizing instance power states, found 1 instances in the database and 0 instances on the hypervisor.
[ 3038.547855] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Triggering sync for uuid 00e65eb9-1db1-4456-9603-8b4cbff8ffe8 {{(pid=62875) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}}
[ 3038.548178] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Acquiring lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 3038.689492] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6d9ffc6c-aac2-49f5-ba36-858978e40e3e tempest-AttachVolumeShelveTestJSON-262331675 tempest-AttachVolumeShelveTestJSON-262331675-project-member] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.413s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3038.690695] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.142s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3038.690893] env[62875]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b232fb0-1cab-4d61-a4d0-13e0afa9bd83 {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3038.699299] env[62875]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68978cc3-8ed5-418f-971c-3dc2bfcf0b8a {{(pid=62875) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3039.224482] env[62875]: DEBUG oslo_concurrency.lockutils [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Lock "00e65eb9-1db1-4456-9603-8b4cbff8ffe8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.534s {{(pid=62875) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3058.220925] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3060.706808] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3060.707122] env[62875]: DEBUG oslo_service.periodic_task [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62875) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3060.707225] env[62875]: DEBUG nova.compute.manager [None req-6164727a-de00-43e5-97ba-c7bdc20e8ccf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62875) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}